lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | mpl-2.0 | f499fe8d341cc7538413322d4c6fbf5be7e0535d | 0 | teleivo/openmrs-module-radiologydcm4chee,openmrs/openmrs-module-radiologydcm4chee,openmrs/openmrs-module-radiologydcm4chee,teleivo/openmrs-module-radiologydcm4chee | /**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.module.radiology.report.template;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.openmrs.ConceptReferenceTerm;
import org.openmrs.ConceptSource;
import org.openmrs.api.APIException;
import org.openmrs.api.AdministrationService;
import org.openmrs.api.context.Context;
import org.openmrs.module.radiology.RadiologyConstants;
import org.openmrs.module.radiology.RadiologyProperties;
import org.openmrs.test.BaseModuleContextSensitiveTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
/**
* Tests {@code MrrtReportTemplateService}.
*/
public class MrrtReportTemplateServiceComponentTest extends BaseModuleContextSensitiveTest {

    /** Dataset executed before each test to seed templates referenced by the constants below. */
    private static final String TEST_DATASET =
            "org/openmrs/module/radiology/include/MrrtReportTemplateServiceComponentTestDataset.xml";

    private static final int EXISTING_TEMPLATE_ID = 1;

    private static final int NON_EXISTING_TEMPLATE_ID = 23;

    private static final String NON_EXISTING_UUID = "invalid uuid";

    private static final String EXISTING_TEMPLATE_TITLE = "title1";

    private static final String NON_EXISTING_TEMPLATE_TITLE = "invalid";

    private static final String TEMPLATE_IDENTIFIER = "1.3.6.1.4.1.21367.13.199.1015";

    private static final String NON_EXISTING_PUBLISHER = "Non existing publisher";

    private static final String UUID_FOR_TEMPLATE_ONE = "aa551445-def0-4f93-9047-95f0a9afbdce";

    @Autowired
    @Qualifier("adminService")
    private AdministrationService administrationService;

    @Autowired
    private RadiologyProperties radiologyProperties;

    @Autowired
    private MrrtReportTemplateFileParser parser;

    @Autowired
    private MrrtReportTemplateService mrrtReportTemplateService;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    @Before
    public void setUp() throws Exception {
        executeDataSet(TEST_DATASET);
    }

    /**
     * Get a file's content as string.
     *
     * @param path the path to get the file content from
     * @return the file content
     * @throws IOException if the file cannot be read
     */
    private String getFileContent(String path) throws IOException {

        File file = getFile(path);
        return getString(file);
    }

    /**
     * Get a file from the test resources.
     *
     * @param path the path to get the file from
     * @return the file on given path
     */
    private File getFile(String path) {
        return new File(getClass().getClassLoader()
                .getResource(path)
                .getFile());
    }

    /**
     * Get the content of a file as string.
     *
     * @param file the file to get the content from
     * @return the file content
     * @throws IOException if the file cannot be read
     */
    private String getString(File file) throws IOException {
        String content = null;
        // read the template resources as UTF-8 explicitly instead of relying on the
        // platform default charset (MRRT templates declare a UTF-8 charset)
        try (InputStream in = new FileInputStream(file)) {
            content = IOUtils.toString(in, "UTF-8");
        }
        return content;
    }

    /**
     * Sets up the global property defining the MRRT template directory using junit's temporary folder.
     *
     * @throws IOException if the temporary folder cannot be created
     */
    private void setUpTemporaryFolder() throws IOException {

        File tempFolder = temporaryFolder.newFolder("mrrt_templates");
        administrationService.setGlobalProperty(RadiologyConstants.GP_MRRT_REPORT_TEMPLATE_DIR,
            tempFolder.getAbsolutePath());
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
     * @verifies get template with given id
     */
    @Test
    public void getMrrtReportTemplate_shouldGetTemplateWithGivenId() throws Exception {

        MrrtReportTemplate existingTemplate = mrrtReportTemplateService.getMrrtReportTemplate(EXISTING_TEMPLATE_ID);

        assertNotNull(existingTemplate);
        // assertEquals expects (expected, actual)
        assertEquals("UTF-8", existingTemplate.getCharset());
        assertEquals("title1", existingTemplate.getDcTermsTitle());
        assertEquals("en", existingTemplate.getDcTermsLanguage());
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
     * @verifies return null if no match was found
     */
    @Test
    public void getMrrtReportTemplate_shouldReturnNullIfNoMatchWasFound() throws Exception {

        assertNull(mrrtReportTemplateService.getMrrtReportTemplate(NON_EXISTING_TEMPLATE_ID));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
     * @verifies throw illegal argument exception if given null
     */
    @Test
    public void getMrrtReportTemplate_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("id cannot be null");
        mrrtReportTemplateService.getMrrtReportTemplate(null);
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateByUuid(String)
     * @verifies find object given existing uuid
     */
    @Test
    public void getMrrtReportTemplateByUuid_shouldFindObjectGivenExistingUuid() {

        MrrtReportTemplate valid = mrrtReportTemplateService.getMrrtReportTemplateByUuid(UUID_FOR_TEMPLATE_ONE);

        assertNotNull(valid);
        assertThat(valid.getTemplateId(), is(EXISTING_TEMPLATE_ID));
        assertThat(valid.getUuid(), is(UUID_FOR_TEMPLATE_ONE));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateByUuid(String)
     * @verifies return null if no object found with given uuid
     */
    @Test
    public void getMrrtReportTemplateByUuid_shouldReturnNullIfNoObjectFoundWithGivenUuid() {

        assertNull(mrrtReportTemplateService.getMrrtReportTemplateByUuid(NON_EXISTING_UUID));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateByUuid(String)
     * @verifies throw illegal argument exception if given null
     */
    @Test
    public void getMrrtReportTemplateByUuid_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("uuid cannot be null");
        mrrtReportTemplateService.getMrrtReportTemplateByUuid(null);
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateByIdentifier(String)
     * @verifies find object given valid identifier
     */
    @Test
    public void getMrrtReportTemplateByIdentifier_shouldFindObjectWithGivenIdentifier() throws Exception {

        MrrtReportTemplate template = mrrtReportTemplateService.getMrrtReportTemplateByIdentifier("identifier1");

        assertNotNull(template);
        assertThat(template.getDcTermsIdentifier(), is("identifier1"));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateByIdentifier(String)
     * @verifies return null if no object found with give identifier
     */
    @Test
    public void getMrrtReportTemplateByIdentifier_shouldReturnNullIfNoObjectFoundWithGivenIdentifier() throws Exception {

        assertNull(mrrtReportTemplateService.getMrrtReportTemplateByIdentifier("invalid identifier"));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateByIdentifier(String)
     * @verifies throw illegal argument exception if given null
     */
    @Test
    public void getMrrtReportTemplateByIdentifier_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("identifier cannot be null");
        mrrtReportTemplateService.getMrrtReportTemplateByIdentifier(null);
    }

    /**
     * @see MrrtReportTemplateService#purgeMrrtReportTemplate(MrrtReportTemplate)
     * @verifies delete report template from database and also delete template file from the system
     */
    @Test
    public void purgeMrrtReportTemplate_shouldDeleteReportTemplateFromDatabaseAndAlsoDeleteTemplateFileFromTheSystem()
            throws Exception {

        setUpTemporaryFolder();

        MrrtReportTemplate template = new MrrtReportTemplate();
        File templateFile = new File(radiologyProperties.getReportTemplateHome(), java.util.UUID.randomUUID()
                .toString());
        // fail fast if the backing file could not be created instead of ignoring the result
        assertTrue(templateFile.createNewFile());

        template.setDcTermsTitle("sample title");
        template.setDcTermsDescription("sample description");
        template.setDcTermsIdentifier("identifier3");
        template.setPath(templateFile.getAbsolutePath());

        MrrtReportTemplate saved = mrrtReportTemplateService.saveMrrtReportTemplate(template);
        assertNotNull(saved.getId());

        File savedFile = new File(saved.getPath());
        assertThat(savedFile.exists(), is(true));

        mrrtReportTemplateService.purgeMrrtReportTemplate(saved);

        assertNull(mrrtReportTemplateService.getMrrtReportTemplate(saved.getId()));
        assertThat(savedFile.exists(), is(false));
    }

    /**
     * @see MrrtReportTemplateService#purgeMrrtReportTemplate(MrrtReportTemplate)
     * @verifies throw illegal argument exception if given null
     */
    @Test
    public void purgeMrrtReportTemplate_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("template cannot be null");
        mrrtReportTemplateService.purgeMrrtReportTemplate(null);
    }

    /**
     * @see MrrtReportTemplateService#purgeMrrtReportTemplate(MrrtReportTemplate)
     * @verifies catch file not found exception when the file been deleted is missing
     */
    @Test
    public void purgeMrrtReportTemplate_shouldCatchFileNotFoundExceptionWhenTheFileBeenDeletedIsMissing() {

        MrrtReportTemplate template = mrrtReportTemplateService.getMrrtReportTemplate(EXISTING_TEMPLATE_ID);

        assertNotNull(template);
        assertThat(new File(template.getPath()).exists(), is(false));
        // must not propagate an exception even though the template file is absent
        mrrtReportTemplateService.purgeMrrtReportTemplate(template);
    }

    /**
     * @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
     * @verifies save given template
     */
    @Test
    public void saveMrrtReportTemplate_shouldSaveGivenTemplate() throws Exception {

        MrrtReportTemplate template = new MrrtReportTemplate();
        template.setDcTermsTitle("sample title");
        template.setDcTermsDescription("sample description");
        template.setDcTermsIdentifier("identifier3");

        MrrtReportTemplate saved = mrrtReportTemplateService.saveMrrtReportTemplate(template);
        MrrtReportTemplate newTemplate = mrrtReportTemplateService.getMrrtReportTemplate(saved.getTemplateId());

        assertNotNull(saved);
        assertNotNull(newTemplate);
        // assertEquals expects (expected, actual)
        assertEquals(template.getDcTermsTitle(), newTemplate.getDcTermsTitle());
        assertEquals(template.getDcTermsDescription(), newTemplate.getDcTermsDescription());
    }

    /**
     * @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
     * @verifies throw api exception if saving template that already exists
     */
    @Test
    public void saveMrrtReportTemplate_shouldThrowApiExceptionIfSavingTemplateThatAlreadyExists() throws Exception {

        MrrtReportTemplate existing = mrrtReportTemplateService.getMrrtReportTemplate(EXISTING_TEMPLATE_ID);
        existing.setDcTermsTitle("modified");

        expectedException.expect(APIException.class);
        expectedException.expectMessage("Template already exist in the system.");
        mrrtReportTemplateService.saveMrrtReportTemplate(existing);
    }

    /**
     * @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
     * @verifies throw illegal argument exception if given null
     */
    @Test
    public void saveMrrtReportTemplate_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("template cannot be null");
        mrrtReportTemplateService.saveMrrtReportTemplate(null);
    }

    /**
     * @verifies create mrrt report template in the database and on the file system
     * @see MrrtReportTemplateService#importMrrtReportTemplate(String)
     */
    @Test
    public void importMrrtReportTemplate_shouldCreateMrrtReportTemplateInTheDatabaseAndOnTheFileSystem() throws Exception {

        setUpTemporaryFolder();

        String sourcePath = "mrrttemplates/ihe/connectathon/2015/CTChestAbdomen.html";
        String template = getFileContent(sourcePath);

        MrrtReportTemplate saved = mrrtReportTemplateService.importMrrtReportTemplate(template);

        assertNotNull(saved);
        assertThat(saved.getDcTermsIdentifier(), is(TEMPLATE_IDENTIFIER));

        File templateHome = radiologyProperties.getReportTemplateHome();
        File templatePath = new File(saved.getPath());
        assertThat(templatePath.getParentFile()
                .getName(),
            is(templateHome.getName()));
        assertTrue(FileUtils.contentEquals(getFile(sourcePath), templatePath.getAbsoluteFile()));
    }

    /**
     * @verifies not create an mrrt report template in the database and store the template as file if given template is invalid
     * @see MrrtReportTemplateService#importMrrtReportTemplate(String)
     */
    @Test
    public void
            importMrrtReportTemplate_shouldNotCreateAnMrrtReportTemplateInTheDatabaseAndStoreTheTemplateAsFileIfGivenTemplateIsInvalid()
                    throws Exception {

        setUpTemporaryFolder();

        String template = getFileContent(
            "mrrttemplates/ihe/connectathon/2015/invalidMrrtReportTemplate-noMetaElementWithCharsetAttribute.html");

        expectedException.expect(APIException.class);
        mrrtReportTemplateService.importMrrtReportTemplate(template);
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
     * @verifies return all mrrt report templates that match given title search query if title is specified
     */
    @Test
    public void getMrrtReportTemplates_shouldReturnAllMrrtReportTemplatesThatMatchGivenTitleSearchQueryIfTitleIsSpecified()
            throws Exception {

        MrrtReportTemplateSearchCriteria searchCriteria =
                new MrrtReportTemplateSearchCriteria.Builder().withTitle(EXISTING_TEMPLATE_TITLE)
                        .build();

        List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);

        assertNotNull(templates);
        assertThat(templates.size(), is(1));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
     * @verifies return an empty list if no match for title was found
     */
    @Test
    public void getMrrtReportTemplates_shouldReturnAnEmptyListIfNoMatchForTitleWasFound() throws Exception {

        MrrtReportTemplateSearchCriteria searchCriteria =
                new MrrtReportTemplateSearchCriteria.Builder().withTitle(NON_EXISTING_TEMPLATE_TITLE)
                        .build();

        List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);

        assertNotNull(templates);
        assertTrue(templates.isEmpty());
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
     * @verifies throw illegal argument exception of given null
     */
    @Test
    public void getMrrtReportTemplates_shouldThrowIllegalArgumentExceptionOfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("mrrtReportTemplateSearchCriteria cannot be null");
        mrrtReportTemplateService.getMrrtReportTemplates(null);
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
     * @verifies return all mrrt report templates that match given publisher anywhere in dcterms publisher insensitive to case
     */
    @Test
    public void
            getMrrtReportTemplates_shouldReturnAllMrrtReportTemplatesThatMatchGivenPublisherAnywhereInDctermsPublisherInsensitiveToCase()
                    throws Exception {

        // partial, case-insensitive match
        String partialPublisherString = "cat";
        MrrtReportTemplateSearchCriteria searchCriteria =
                new MrrtReportTemplateSearchCriteria.Builder().withPublisher(partialPublisherString)
                        .build();

        List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);

        assertNotNull(templates);
        assertThat(templates.size(), is(2));
        for (MrrtReportTemplate template : templates) {
            assertThat(template.getDcTermsPublisher()
                    .toLowerCase(),
                containsString(partialPublisherString));
        }

        // exact match
        String exactPublisherString = "IHE CAT Publisher";
        searchCriteria = new MrrtReportTemplateSearchCriteria.Builder().withPublisher(exactPublisherString)
                .build();

        templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);

        assertNotNull(templates);
        assertThat(templates.size(), is(1));
        for (MrrtReportTemplate template : templates) {
            assertThat(template.getDcTermsPublisher(), is(exactPublisherString));
        }
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
     * @verifies return an empty list if no match for publisher was found
     */
    @Test
    public void getMrrtReportTemplates_shouldReturnAnEmptyListIfNoMatchForPublisherWasFound() throws Exception {

        MrrtReportTemplateSearchCriteria searchCriteria =
                new MrrtReportTemplateSearchCriteria.Builder().withPublisher(NON_EXISTING_PUBLISHER)
                        .build();

        List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);

        assertNotNull(templates);
        assertTrue(templates.isEmpty());
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateHtmlBody(MrrtReportTemplate)
     * @verifies return the body content of the mrrt report template file
     */
    @Test
    public void getMrrtReportTemplateHtmlBody_shouldReturnTheBodyContentOfTheMrrtReportTemplateFile() throws Exception {

        File tmpTemplateFile = temporaryFolder.newFile();
        // write the fixture as UTF-8 explicitly instead of relying on the platform default charset
        FileUtils.writeStringToFile(tmpTemplateFile,
            "<html>" + "<head><title>Sample Template</title></head>" + "<body><p>Sample Template</p></body>" + "</html>",
            "UTF-8");

        MrrtReportTemplate mockTemplate = mock(MrrtReportTemplate.class);
        when(mockTemplate.getPath()).thenReturn(tmpTemplateFile.getAbsolutePath());

        String bodyContentReturned = mrrtReportTemplateService.getMrrtReportTemplateHtmlBody(mockTemplate);

        assertNotNull(bodyContentReturned);
        assertThat(bodyContentReturned, is("<p>Sample Template</p>"));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplateHtmlBody(MrrtReportTemplate)
     * @verifies throw illegal argument exception if given null
     */
    @Test
    public void getMrrtReportTemplateHtmlBody_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {

        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("mrrtReportTemplate cannot be null");
        mrrtReportTemplateService.getMrrtReportTemplateHtmlBody(null);
    }

    /**
     * @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
     * @verifies save template object with terms if matching concept reference term was found
     */
    @Test
    public void saveMrrtReportTemplate_shouldSaveTemplateObjectWithTermsIfMatchingConceptReferenceTermWasFound()
            throws Exception {

        String templateString = getFileContent("mrrttemplates/ihe/connectathon/2015/CTChestAbdomen.html");

        MrrtReportTemplate template = parser.parse(templateString);
        MrrtReportTemplate saved = mrrtReportTemplateService.saveMrrtReportTemplate(template);

        assertNotNull(saved);
        assertThat(saved.getTerms()
                .size(),
            is(1));
    }

    /**
     * @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
     * @verifies properly retrieve mrrt report templates with concept reference terms
     */
    @Test
    public void getMrrtReportTemplate_shouldProperlyRetrieveMrrtReportTemplatesWithConceptReferenceTerms() {

        MrrtReportTemplate template = mrrtReportTemplateService.getMrrtReportTemplate(EXISTING_TEMPLATE_ID);

        assertNotNull(template);
        assertThat(template.getTerms()
                .size(),
            is(1));

        ConceptSource conceptSource = Context.getConceptService()
                .getConceptSourceByName("RADLEX");
        ConceptReferenceTerm referenceTerm = Context.getConceptService()
                .getConceptReferenceTermByCode("RID10321", conceptSource);
        assertThat(template.getTerms()
                .contains(referenceTerm),
            is(true));
    }
}
| api/src/test/java/org/openmrs/module/radiology/report/template/MrrtReportTemplateServiceComponentTest.java | /**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.module.radiology.report.template;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.openmrs.ConceptReferenceTerm;
import org.openmrs.ConceptSource;
import org.openmrs.api.APIException;
import org.openmrs.api.AdministrationService;
import org.openmrs.api.context.Context;
import org.openmrs.module.radiology.RadiologyConstants;
import org.openmrs.module.radiology.RadiologyProperties;
import org.openmrs.test.BaseModuleContextSensitiveTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
/**
* Tests {@code MrrtReportTemplateService}.
*/
public class MrrtReportTemplateServiceComponentTest extends BaseModuleContextSensitiveTest {
@Autowired
@Qualifier("adminService")
private AdministrationService administrationService;
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Autowired
private RadiologyProperties radiologyProperties;
@Autowired
private MrrtReportTemplateFileParser parser;
private static final String TEST_DATASET =
"org/openmrs/module/radiology/include/MrrtReportTemplateServiceComponentTestDataset.xml";
private static final int EXISTING_TEMPLATE_ID = 1;
private static final int NON_EXISTING_TEMPLATE_ID = 23;
private static final String NON_EXISTING_UUID = "invalid uuid";
private static final String EXISTING_TEMPLATE_TITLE = "title1";
private static final String NON_EXISTING_TEMPLATE_TITLE = "invalid";
private static final String TEMPLATE_IDENTIFIER = "1.3.6.1.4.1.21367.13.199.1015";
private static final String NON_EXISTING_PUBLISHER = "Non existing publisher";
private static final String UUID_FOR_TEMPLATE_ONE = "aa551445-def0-4f93-9047-95f0a9afbdce";
private static final String UUID_FOR_TEMPLATE_TWO = "59273e52-33b1-4fcb-8c1f-9b670bb11259";
@Autowired
private MrrtReportTemplateService mrrtReportTemplateService;
@Before
public void setUp() throws Exception {
executeDataSet(TEST_DATASET);
}
/**
* Get a files content as string.
*
* @param path the path to get the file content from
* @return the file content
*/
private String getFileContent(String path) throws IOException {
File file = getFile(path);
return getString(file);
}
/**
* Get a file from the test resources.
*
* @param path the path to get the file from
* @return the file on given path
*/
private File getFile(String path) {
return new File(getClass().getClassLoader()
.getResource(path)
.getFile());
}
/**
* Get a file from the test resources.
*
* @param file the file to get the content from
* @return the file content
*/
private String getString(File file) throws IOException {
String content = null;
try (InputStream in = new FileInputStream(file)) {
content = IOUtils.toString(in);
}
return content;
}
/**
* Sets up the global property defining the MRRT template directory using junits temporary folder.
*
* @throws IOException
*/
private void setUpTemporaryFolder() throws IOException {
File tempFolder = temporaryFolder.newFolder("/mrrt_templates");
administrationService.setGlobalProperty(RadiologyConstants.GP_MRRT_REPORT_TEMPLATE_DIR,
tempFolder.getAbsolutePath());
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
* @verifies get template with given id
*/
@Test
public void getMrrtReportTemplate_shouldGetTemplateWithGivenId() throws Exception {
MrrtReportTemplate existingTemplate = mrrtReportTemplateService.getMrrtReportTemplate(EXISTING_TEMPLATE_ID);
assertNotNull(existingTemplate);
assertEquals(existingTemplate.getCharset(), "UTF-8");
assertEquals(existingTemplate.getDcTermsTitle(), "title1");
assertEquals(existingTemplate.getDcTermsLanguage(), "en");
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
* @verifies return null if no match was found
*/
@Test
public void getMrrtReportTemplate_shouldReturnNullIfNoMatchWasFound() throws Exception {
assertNull(mrrtReportTemplateService.getMrrtReportTemplate(NON_EXISTING_TEMPLATE_ID));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
* @verifies throw illegal argument exception if given null
*/
@Test
public void getMrrtReportTemplate_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("id cannot be null");
mrrtReportTemplateService.getMrrtReportTemplate(null);
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateByUuid(String)
* @verifies find object given existing uuid
*/
@Test
public void getMrrtReportTemplateByUuid_shouldFindObjectGivenExistingUuid() {
MrrtReportTemplate valid = mrrtReportTemplateService.getMrrtReportTemplateByUuid(UUID_FOR_TEMPLATE_ONE);
assertNotNull(valid);
assertThat(valid.getTemplateId(), is(EXISTING_TEMPLATE_ID));
assertThat(valid.getUuid(), is(UUID_FOR_TEMPLATE_ONE));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateByUuid(String)
* @verifies return null if no object found with given uuid
*/
@Test
public void getMrrtReportTemplateByUuid_shouldReturnNullIfNoObjectFoundWithGivenUuid() {
assertNull(mrrtReportTemplateService.getMrrtReportTemplateByUuid(NON_EXISTING_UUID));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateByUuid(String)
* @verifies throw illegal argument exception if given null
*/
@Test
public void getMrrtReportTemplateByUuid_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("uuid cannot be null");
mrrtReportTemplateService.getMrrtReportTemplateByUuid(null);
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateByIdentifier(String)
* @verifies find object given valid identifier
*/
@Test
public void getMrrtReportTemplateByIdentifier_shouldFindObjectWithGivenIdentifier() throws Exception {
MrrtReportTemplate template = mrrtReportTemplateService.getMrrtReportTemplateByIdentifier("identifier1");
assertNotNull(template);
assertThat(template.getDcTermsIdentifier(), is("identifier1"));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateByIdentifier(String)
* @verifies return null if no object found with give identifier
*/
@Test
public void getMrrtReportTemplateByIdentifier_shouldReturnNullIfNoObjectFoundWithGivenIdentifier() throws Exception {
assertNull(mrrtReportTemplateService.getMrrtReportTemplateByIdentifier("invalid identifier"));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateByIdentifier(String)
* @verifies throw illegal argument exception if given null
*/
@Test
public void getMrrtReportTemplateByIdentifier_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("identifier cannot be null");
mrrtReportTemplateService.getMrrtReportTemplateByIdentifier(null);
}
/**
* @see MrrtReportTemplateService#purgeMrrtReportTemplate(MrrtReportTemplate)
* @verifies delete report template from database and also delete template file from the system
*/
@Test
public void purgeMrrtReportTemplate_shouldDeleteReportTemplateFromDatabaseAndAlsoDeleteTemplateFileFromTheSystem()
throws Exception {
setUpTemporaryFolder();
MrrtReportTemplate template = new MrrtReportTemplate();
File templateFile = new File(radiologyProperties.getReportTemplateHome(), java.util.UUID.randomUUID()
.toString());
templateFile.createNewFile();
template.setDcTermsTitle("sample title");
template.setDcTermsDescription("sample description");
template.setDcTermsIdentifier("identifier3");
template.setPath(templateFile.getAbsolutePath());
MrrtReportTemplate saved = mrrtReportTemplateService.saveMrrtReportTemplate(template);
assertNotNull(saved.getId());
File savedFile = new File(saved.getPath());
assertThat(savedFile.exists(), is(true));
mrrtReportTemplateService.purgeMrrtReportTemplate(saved);
assertNull(mrrtReportTemplateService.getMrrtReportTemplate(saved.getId()));
assertThat(savedFile.exists(), is(false));
}
/**
* @see MrrtReportTemplateService#purgeMrrtReportTemplate(MrrtReportTemplate)
* @verifies throw illegal argument exception if given null
*/
@Test
public void purgeMrrtReportTemplate_shouldThrowIlligalArgumentExceptionIfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("template cannot be null");
mrrtReportTemplateService.purgeMrrtReportTemplate(null);
}
/**
* @see MrrtReportTemplateService#purgeMrrtReportTemplate(MrrtReportTemplate)
* @verifies catch file not found exception when the file been deleted is missing
*/
@Test
public void purgeMrrtReportTemplate_shouldCatchFileNotFoundExceptionWhenTheFileBeenDeletedIsMissing() {
MrrtReportTemplate template = mrrtReportTemplateService.getMrrtReportTemplate(1);
assertNotNull(template);
assertThat(new File(template.getPath()).exists(), is(false));
mrrtReportTemplateService.purgeMrrtReportTemplate(template);
}
/**
* @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
* @verifies save given template
*/
@Test
public void saveMrrtReportTemplate_shouldSaveGivenTemplate() throws Exception {
MrrtReportTemplate template = new MrrtReportTemplate();
template.setDcTermsTitle("sample title");
template.setDcTermsDescription("sample description");
template.setDcTermsIdentifier("identifier3");
MrrtReportTemplate saved = mrrtReportTemplateService.saveMrrtReportTemplate(template);
MrrtReportTemplate newTemplate = mrrtReportTemplateService.getMrrtReportTemplate(saved.getTemplateId());
assertNotNull(saved);
assertNotNull(newTemplate);
assertEquals(newTemplate.getDcTermsTitle(), template.getDcTermsTitle());
assertEquals(newTemplate.getDcTermsDescription(), template.getDcTermsDescription());
}
/**
* @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
* @verifies throw api exception if saving template that already exists
*/
@Test
public void saveMrrtReportTemplate_shouldThrowApiExceptionIfSavingTemplateThatAlreadyExists() throws Exception {
MrrtReportTemplate existing = mrrtReportTemplateService.getMrrtReportTemplate(EXISTING_TEMPLATE_ID);
existing.setDcTermsTitle("modified");
expectedException.expect(APIException.class);
expectedException.expectMessage("Template already exist in the system.");
mrrtReportTemplateService.saveMrrtReportTemplate(existing);
}
/**
* @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
* @verifies throw illegal argument exception if given null
*/
@Test
public void saveMrrtReportTemplate_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("template cannot be null");
mrrtReportTemplateService.saveMrrtReportTemplate(null);
}
/**
* @verifies create mrrt report template in the database and on the file system
* @see MrrtReportTemplateService#importMrrtReportTemplate(String)
*/
@Test
public void importMrrtReportTemplate_shouldCreateMrrtReportTemplateInTheDatabaseAndOnTheFileSystem() throws Exception {
setUpTemporaryFolder();
String sourcePath = "mrrttemplates/ihe/connectathon/2015/CTChestAbdomen.html";
String template = getFileContent(sourcePath);
MrrtReportTemplate saved = mrrtReportTemplateService.importMrrtReportTemplate(template);
assertNotNull(saved);
assertThat(saved.getDcTermsIdentifier(), is(TEMPLATE_IDENTIFIER));
File templateHome = radiologyProperties.getReportTemplateHome();
File templatePath = new File(saved.getPath());
assertThat(templatePath.getParentFile()
.getName(),
is(templateHome.getName()));
assertTrue(FileUtils.contentEquals(getFile(sourcePath), templatePath.getAbsoluteFile()));
}
/**
* @verifies not create an mrrt report template in the database and store the template as file if given template is invalid
* @see MrrtReportTemplateService#importMrrtReportTemplate(String)
*/
@Test
public void
importMrrtReportTemplate_shouldNotCreateAnMrrtReportTemplateInTheDatabaseAndStoreTheTemplateAsFileIfGivenTemplateIsInvalid()
throws Exception {
setUpTemporaryFolder();
String template = getFileContent(
"mrrttemplates/ihe/connectathon/2015/invalidMrrtReportTemplate-noMetaElementWithCharsetAttribute.html");
expectedException.expect(APIException.class);
mrrtReportTemplateService.importMrrtReportTemplate(template);
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
* @verifies return all mrrt report templates that match given title search query if title is specified
*/
@Test
public void getMrrtReportTemplates_shouldReturnAllMrrtReportTemplatesThatMatchGivenTitleSearchQueryIfTitleIsSpecified()
throws Exception {
MrrtReportTemplateSearchCriteria searchCriteria =
new MrrtReportTemplateSearchCriteria.Builder().withTitle(EXISTING_TEMPLATE_TITLE)
.build();
List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);
assertNotNull(templates);
assertThat(templates.size(), is(1));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
* @verifies return an empty list of no match for title was found
*/
@Test
public void getMrrtRepdortTemplates_shouldReturnAnEmptyListOfNoMatchForTitleWasFound() throws Exception {
MrrtReportTemplateSearchCriteria searchCriteria =
new MrrtReportTemplateSearchCriteria.Builder().withTitle(NON_EXISTING_TEMPLATE_TITLE)
.build();
List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);
assertNotNull(templates);
assertTrue(templates.isEmpty());
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
* @verifies throw illegal argument exception of given null
*/
@Test
public void getMrrtReportTemplates_shouldThrowIllegalArgumentExceptionOfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("mrrtReportTemplateSearchCriteria cannot be null");
mrrtReportTemplateService.getMrrtReportTemplates(null);
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
* @verifies return all mrrt report templates that match given publisher anywhere in dcterms publisher insensitive to case
*/
@Test
public void
getMrrtReportTemplates_shouldReturnAllMrrtReportTemplatesThatMatchGivenPublisherAnywhereInDctermsPublisherInsensitiveToCase()
throws Exception {
MrrtReportTemplateSearchCriteria searchCriteria = new MrrtReportTemplateSearchCriteria.Builder().withPublisher("cat")
.build();
List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);
List<String> uuids = new ArrayList<>();
for (MrrtReportTemplate template : templates) {
uuids.add(template.getUuid());
}
assertNotNull(templates);
assertThat(templates.size(), is(2));
assertThat(uuids.contains(UUID_FOR_TEMPLATE_ONE), is(true));
assertThat(uuids.contains(UUID_FOR_TEMPLATE_TWO), is(true));
searchCriteria = new MrrtReportTemplateSearchCriteria.Builder().withPublisher("IHE CAT Publisher")
.build();
templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);
uuids = new ArrayList<>();
for (MrrtReportTemplate template : templates) {
uuids.add(template.getUuid());
}
assertNotNull(templates);
assertThat(templates.size(), is(1));
assertThat(uuids.contains(UUID_FOR_TEMPLATE_ONE), is(true));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplates(MrrtReportTemplateSearchCriteria)
* @verifies return an empty list if no match for publisher was found
*/
@Test
public void getMrrtRepdortTemplates_shouldReturnAnEmptyListIfNoMatchForPublisherWasFound() throws Exception {
MrrtReportTemplateSearchCriteria searchCriteria =
new MrrtReportTemplateSearchCriteria.Builder().withPublisher(NON_EXISTING_PUBLISHER)
.build();
List<MrrtReportTemplate> templates = mrrtReportTemplateService.getMrrtReportTemplates(searchCriteria);
assertNotNull(templates);
assertTrue(templates.isEmpty());
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateHtmlBody(MrrtReportTemplate)
* @verifies return the body content of the mrrt report template file
*/
@Test
public void getMrrtReportTemplateHtmlBody_shouldReturnTheBodyContentOfTheMrrtReportTemplateFile() throws Exception {
File tmpTemplateFile = temporaryFolder.newFile();
FileUtils.writeStringToFile(tmpTemplateFile,
"<html>" + "<head><title>Sample Template</title></head>" + "<body><p>Sample Template</p></body>" + "</html>");
MrrtReportTemplate mockTemplate = mock(MrrtReportTemplate.class);
when(mockTemplate.getPath()).thenReturn(tmpTemplateFile.getAbsolutePath());
String bodyContentReturned = mrrtReportTemplateService.getMrrtReportTemplateHtmlBody(mockTemplate);
assertNotNull(bodyContentReturned);
assertThat(bodyContentReturned, is("<p>Sample Template</p>"));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplateHtmlBody(MrrtReportTemplate)
* @verifies throw illegal argument exception if given null
*/
@Test
public void getMrrtReportTemplateHtmlBody_shouldThrowIllegalArgumentExceptionIfGivenNull() throws Exception {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("mrrtReportTemplate cannot be null");
mrrtReportTemplateService.getMrrtReportTemplateHtmlBody(null);
}
/**
* @see MrrtReportTemplateService#saveMrrtReportTemplate(MrrtReportTemplate)
* @verifies save template object with terms if matching concept reference term was found
*/
@Test
public void saveMrrtReportTemplate_shouldSaveTemplateObjectWithTermsIfMatchingConceptReferenceTermWasFound()
throws Exception {
String templateString = getFileContent("mrrttemplates/ihe/connectathon/2015/CTChestAbdomen.html");
MrrtReportTemplate template = parser.parse(templateString);
MrrtReportTemplate saved = mrrtReportTemplateService.saveMrrtReportTemplate(template);
assertNotNull(saved);
assertThat(saved.getTerms()
.size(),
is(1));
}
/**
* @see MrrtReportTemplateService#getMrrtReportTemplate(Integer)
* @verifies properly retrieve mrrt report templates with concept reference terms
*/
@Test
public void getMrrtReportTemplate_shouldProperlyRetrieveMrrtReportTemplatesWithConceptReferenceTerms() {
MrrtReportTemplate template = mrrtReportTemplateService.getMrrtReportTemplate(1);
assertNotNull(template);
assertThat(template.getTerms()
.size(),
is(1));
ConceptSource conceptSource = Context.getConceptService()
.getConceptSourceByName("RADLEX");
ConceptReferenceTerm referenceTerm = Context.getConceptService()
.getConceptReferenceTermByCode("RID10321", conceptSource);
assertThat(template.getTerms()
.contains(referenceTerm),
is(true));
}
}
| RAD-376 Adapt test for getting publisher
adapt test for getMrrtReportTemplates by the publisher string to test
if the returned templates publisher string matches/partially matches
the publisher defined in the search criteria
| api/src/test/java/org/openmrs/module/radiology/report/template/MrrtReportTemplateServiceComponentTest.java | RAD-376 Adapt test for getting publisher |
|
Java | agpl-3.0 | 96b936e4a427209a5c57cc3622878439e27c5786 | 0 | geomajas/geomajas-project-deskmanager,geomajas/geomajas-project-deskmanager | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2014 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.plugin.deskmanager.client.gwt.manager.security.view;
import com.google.gwt.core.client.GWT;
import com.smartgwt.client.types.Side;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.IButton;
import com.smartgwt.client.widgets.events.ClickEvent;
import com.smartgwt.client.widgets.events.ClickHandler;
import com.smartgwt.client.widgets.layout.HLayout;
import com.smartgwt.client.widgets.layout.VLayout;
import com.smartgwt.client.widgets.tab.Tab;
import com.smartgwt.client.widgets.tab.TabSet;
import com.smartgwt.client.widgets.tab.events.TabSelectedEvent;
import com.smartgwt.client.widgets.tab.events.TabSelectedHandler;
import org.geomajas.gwt.client.util.WidgetLayout;
import org.geomajas.plugin.deskmanager.client.gwt.common.GdmLayout;
import org.geomajas.plugin.deskmanager.client.gwt.common.util.DeskmanagerLayout;
import org.geomajas.plugin.deskmanager.client.gwt.manager.i18n.ManagerMessages;
import org.geomajas.plugin.deskmanager.client.gwt.manager.security.presenter.ObjectsTabHandler;
import org.geomajas.plugin.deskmanager.client.gwt.manager.security.presenter.UsersAndGroupsHandler;
import org.geomajas.plugin.deskmanager.client.gwt.manager.service.Manager;
import org.geomajas.plugin.deskmanager.domain.security.dto.TerritoryDto;
import org.geomajas.plugin.deskmanager.domain.security.dto.UserDto;
import java.util.HashMap;
import java.util.Map;
/**
* Frontend presentation of a selectable list of users/groups, and a panel that contains configuration options of the
* selected user or group.
*
* @author Jan De Moerloose
* @author Jan Venstermans
*/
public class UsersAndGroups extends VLayout implements UsersAndGroupsView {
private static final ManagerMessages MESSAGES = GWT.create(ManagerMessages.class);
private TabSet gridTabSet;
private Map<UsersAndGroupsHandler.MainTab, Tab> mainTabsMap
= new HashMap<UsersAndGroupsHandler.MainTab, Tab>();
private Map<UsersAndGroupsHandler.MainTab, TabSet> tabSetWithSubTabsMap
= new HashMap<UsersAndGroupsHandler.MainTab, TabSet>();
private Map<UsersAndGroupsHandler.MainTab, Map<ObjectsTabHandler.SubTab, Tab>> subTabMap
= new HashMap<UsersAndGroupsHandler.MainTab, Map<ObjectsTabHandler.SubTab, Tab>>();
private TabSet usersTabSet;
private Tab userDetailTab;
private static final int MARGIN = 20;
private UsersAndGroupsHandler handler;
public UsersAndGroups() {
super(MARGIN);
// --------- Top part
VLayout topContainer = new VLayout(5);
topContainer.setShowResizeBar(true);
topContainer.setMinHeight(200);
topContainer.setHeight("30%");
topContainer.setLayoutBottomMargin(5);
/* Add buttons */
IButton userButtonNew = new IButton(MESSAGES.securityNewUserButtonText());
userButtonNew.setWidth(userButtonNew.getTitle().length() * GdmLayout.buttonFontWidth + GdmLayout.buttonOffset);
userButtonNew.setIcon(WidgetLayout.iconAdd);
userButtonNew.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
createObject(UsersAndGroupsHandler.MainTab.USERS);
}
});
IButton groupButtonNew = new IButton(MESSAGES.securityNewGroupButtonText());
groupButtonNew.setWidth(userButtonNew.getTitle().length() * GdmLayout.buttonFontWidth + GdmLayout.buttonOffset);
groupButtonNew.setIcon(WidgetLayout.iconAdd);
groupButtonNew.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
createObject(UsersAndGroupsHandler.MainTab.GROUPS);
}
});
UsersView usersView = Manager.getUsersManagementViews().getUsersView();
GroupsView groupsView = Manager.getUsersManagementViews().getGroupsView();
AdminAssignView adminsView = Manager.getUsersManagementViews().getAdminsView();
/* main tabs*/
VLayout userLayout = new VLayout();
userLayout.addMember((Canvas) usersView);
userLayout.addMember(userButtonNew);
Tab usersTab = new Tab(MESSAGES.securityUsersTab());
usersTab.setPane(userLayout);
VLayout groupLayout = new VLayout();
groupLayout.addMember((Canvas) groupsView);
groupLayout.addMember(groupButtonNew);
Tab groupsTab = new Tab(MESSAGES.securityGroupsTab());
groupsTab.setPane(groupLayout);
VLayout adminLayout = new VLayout();
adminLayout.addMember((Canvas) adminsView);
Tab adminsTab = new Tab(MESSAGES.securityAdminsTab());
adminsTab.setPane(adminLayout);
gridTabSet = new TabSet();
gridTabSet.addTab(usersTab);
gridTabSet.addTab(groupsTab);
gridTabSet.addTab(adminsTab);
gridTabSet.setTabBarPosition(Side.LEFT);
gridTabSet.setTabBarThickness(DeskmanagerLayout.tabBarLeftThickness);
topContainer.addMember(gridTabSet);
// Bottom part-------------------
UserDetailView userDetail = Manager.getUsersManagementViews().getUserDetailView();
userDetailTab = new Tab(MESSAGES.securityUserDetailTab());
userDetailTab.setPane((Canvas) userDetail);
RoleSelectAssignView<UserDto, TerritoryDto> userAssign = Manager.getUsersManagementViews().getUserAssignView();
Tab userAssignTab = new Tab(MESSAGES.securityUserAssignTab());
userAssignTab.setPane((Canvas) userAssign);
usersTabSet = new TabSet();
usersTabSet.addTab(userDetailTab);
usersTabSet.addTab(userAssignTab);
GroupDetailView groupDetail = Manager.getUsersManagementViews().getGroupDetailView();
Tab groupDetailTab = new Tab(MESSAGES.securityGroupDetailTab());
groupDetailTab.setPane((Canvas) groupDetail);
RoleSelectAssignView groupAssign = Manager.getUsersManagementViews().getGroupAssignView();
Tab groupAssignTab = new Tab(MESSAGES.securityGroupAssignTab());
groupAssignTab.setPane((Canvas) groupAssign);
TabSet groupsTabSet = new TabSet();
groupsTabSet.addTab(groupDetailTab);
groupsTabSet.addTab(groupAssignTab);
HLayout detailContainer = new HLayout();
detailContainer.setMinHeight(200);
detailContainer.setHeight("40%");
detailContainer.setLayoutTopMargin(5);
detailContainer.addMember(usersTabSet);
detailContainer.addMember(groupsTabSet);
addMember(topContainer);
addMember(detailContainer);
// Whiteboard.registerHandler(this);
/* fill the maps */
// create tabSetWithMainTabsMap
mainTabsMap.put(UsersAndGroupsHandler.MainTab.USERS, usersTab);
mainTabsMap.put(UsersAndGroupsHandler.MainTab.GROUPS, groupsTab);
mainTabsMap.put(UsersAndGroupsHandler.MainTab.ADMINS, adminsTab);
// create tabSetWithSubTabsMap
tabSetWithSubTabsMap.put(UsersAndGroupsHandler.MainTab.USERS, usersTabSet);
tabSetWithSubTabsMap.put(UsersAndGroupsHandler.MainTab.GROUPS, groupsTabSet);
// create subTabMap
Map<ObjectsTabHandler.SubTab, Tab> usersSubTabMap = new HashMap<ObjectsTabHandler.SubTab, Tab>();
usersSubTabMap.put(ObjectsTabHandler.SubTab.DETAILS, userDetailTab);
usersSubTabMap.put(ObjectsTabHandler.SubTab.ASSIGN, userAssignTab);
subTabMap.put(UsersAndGroupsHandler.MainTab.USERS, usersSubTabMap);
Map<ObjectsTabHandler.SubTab, Tab> groupsSubTabMap = new HashMap<ObjectsTabHandler.SubTab, Tab>();
groupsSubTabMap.put(ObjectsTabHandler.SubTab.DETAILS, groupDetailTab);
groupsSubTabMap.put(ObjectsTabHandler.SubTab.ASSIGN, groupAssignTab);
subTabMap.put(UsersAndGroupsHandler.MainTab.GROUPS, groupsSubTabMap);
}
@Override
public void setHandler(UsersAndGroupsHandler handler) {
this.handler = handler;
bind();
}
private void bind() {
gridTabSet.addTabSelectedHandler(new TabSelectedHandler() {
@Override
public void onTabSelected(TabSelectedEvent event) {
if (mainTabsMap.containsValue(event.getTab())) {
for (Map.Entry<UsersAndGroupsHandler.MainTab, Tab> entry : mainTabsMap.entrySet()) {
if (entry.getValue().equals(event.getTab())) {
onMainTabSelected(entry.getKey());
}
}
}
}
});
}
private void createObject(UsersAndGroupsHandler.MainTab objectTab) {
handler.onCreateObjectForTab(objectTab);
}
@Override
public void readData() {
// if (gridTabSet.getSelectedTabNumber() == 0) {
// showUsers();
// } else {
// showGroups();
// }
}
@Override
public void selectSubTab(UsersAndGroupsHandler.MainTab mainTab, ObjectsTabHandler.SubTab subTab) {
if (subTabMap.containsKey(mainTab) && tabSetWithSubTabsMap.containsKey(mainTab)) {
TabSet tabParent = tabSetWithSubTabsMap.get(mainTab);
Map<ObjectsTabHandler.SubTab, Tab> secondTabMap = subTabMap.get(mainTab);
if (secondTabMap.containsKey(subTab)) {
Tab tabChild = secondTabMap.get(subTab);
tabParent.selectTab(tabChild);
}
}
}
private void onMainTabSelected(UsersAndGroupsHandler.MainTab mainTab) {
handler.onMainTabSelected(mainTab);
// visibility of button part of our view, we manage this ourselves
showTabSetOfMainTab(mainTab);
}
private void showTabSetOfMainTab(UsersAndGroupsHandler.MainTab mainTab) {
for (Map.Entry<UsersAndGroupsHandler.MainTab, TabSet> entry : tabSetWithSubTabsMap.entrySet()) {
if (entry.getKey().equals(mainTab)) {
entry.getValue().show();
} else {
entry.getValue().hide();
}
}
}
}
| deskmanager-gwt/src/main/java/org/geomajas/plugin/deskmanager/client/gwt/manager/security/view/UsersAndGroups.java | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2014 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.plugin.deskmanager.client.gwt.manager.security.view;
import com.google.gwt.core.client.GWT;
import com.smartgwt.client.types.Side;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.IButton;
import com.smartgwt.client.widgets.events.ClickEvent;
import com.smartgwt.client.widgets.events.ClickHandler;
import com.smartgwt.client.widgets.layout.HLayout;
import com.smartgwt.client.widgets.layout.VLayout;
import com.smartgwt.client.widgets.tab.Tab;
import com.smartgwt.client.widgets.tab.TabSet;
import com.smartgwt.client.widgets.tab.events.TabSelectedEvent;
import com.smartgwt.client.widgets.tab.events.TabSelectedHandler;
import org.geomajas.gwt.client.util.WidgetLayout;
import org.geomajas.plugin.deskmanager.client.gwt.common.GdmLayout;
import org.geomajas.plugin.deskmanager.client.gwt.manager.i18n.ManagerMessages;
import org.geomajas.plugin.deskmanager.client.gwt.manager.security.presenter.ObjectsTabHandler;
import org.geomajas.plugin.deskmanager.client.gwt.manager.security.presenter.UsersAndGroupsHandler;
import org.geomajas.plugin.deskmanager.client.gwt.manager.service.Manager;
import org.geomajas.plugin.deskmanager.domain.security.dto.TerritoryDto;
import org.geomajas.plugin.deskmanager.domain.security.dto.UserDto;
import java.util.HashMap;
import java.util.Map;
/**
* Frontend presentation of a selectable list of users/groups, and a panel that contains configuration options of the
* selected user or group.
*
* @author Jan De Moerloose
* @author Jan Venstermans
*/
public class UsersAndGroups extends VLayout implements UsersAndGroupsView {
private static final ManagerMessages MESSAGES = GWT.create(ManagerMessages.class);
private TabSet gridTabSet;
private Map<UsersAndGroupsHandler.MainTab, Tab> mainTabsMap
= new HashMap<UsersAndGroupsHandler.MainTab, Tab>();
private Map<UsersAndGroupsHandler.MainTab, TabSet> tabSetWithSubTabsMap
= new HashMap<UsersAndGroupsHandler.MainTab, TabSet>();
private Map<UsersAndGroupsHandler.MainTab, Map<ObjectsTabHandler.SubTab, Tab>> subTabMap
= new HashMap<UsersAndGroupsHandler.MainTab, Map<ObjectsTabHandler.SubTab, Tab>>();
private TabSet usersTabSet;
private Tab userDetailTab;
private static final int MARGIN = 20;
private UsersAndGroupsHandler handler;
public UsersAndGroups() {
super(MARGIN);
// --------- Top part
VLayout topContainer = new VLayout(5);
topContainer.setShowResizeBar(true);
topContainer.setMinHeight(200);
topContainer.setHeight("30%");
topContainer.setLayoutBottomMargin(5);
/* Add buttons */
IButton userButtonNew = new IButton(MESSAGES.securityNewUserButtonText());
userButtonNew.setWidth(userButtonNew.getTitle().length() * GdmLayout.buttonFontWidth + GdmLayout.buttonOffset);
userButtonNew.setIcon(WidgetLayout.iconAdd);
userButtonNew.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
createObject(UsersAndGroupsHandler.MainTab.USERS);
}
});
IButton groupButtonNew = new IButton(MESSAGES.securityNewGroupButtonText());
groupButtonNew.setWidth(userButtonNew.getTitle().length() * GdmLayout.buttonFontWidth + GdmLayout.buttonOffset);
groupButtonNew.setIcon(WidgetLayout.iconAdd);
groupButtonNew.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
createObject(UsersAndGroupsHandler.MainTab.GROUPS);
}
});
UsersView usersView = Manager.getUsersManagementViews().getUsersView();
GroupsView groupsView = Manager.getUsersManagementViews().getGroupsView();
AdminAssignView adminsView = Manager.getUsersManagementViews().getAdminsView();
/* main tabs*/
VLayout userLayout = new VLayout();
userLayout.addMember((Canvas) usersView);
userLayout.addMember(userButtonNew);
Tab usersTab = new Tab(MESSAGES.securityUsersTab());
usersTab.setPane(userLayout);
VLayout groupLayout = new VLayout();
groupLayout.addMember((Canvas) groupsView);
groupLayout.addMember(groupButtonNew);
Tab groupsTab = new Tab(MESSAGES.securityGroupsTab());
groupsTab.setPane(groupLayout);
VLayout adminLayout = new VLayout();
adminLayout.addMember((Canvas) adminsView);
Tab adminsTab = new Tab(MESSAGES.securityAdminsTab());
adminsTab.setPane(adminLayout);
gridTabSet = new TabSet();
gridTabSet.addTab(usersTab);
gridTabSet.addTab(groupsTab);
gridTabSet.addTab(adminsTab);
gridTabSet.setTabBarPosition(Side.LEFT);
gridTabSet.setTabBarThickness(70);
topContainer.addMember(gridTabSet);
// Bottom part-------------------
UserDetailView userDetail = Manager.getUsersManagementViews().getUserDetailView();
userDetailTab = new Tab(MESSAGES.securityUserDetailTab());
userDetailTab.setPane((Canvas) userDetail);
RoleSelectAssignView<UserDto, TerritoryDto> userAssign = Manager.getUsersManagementViews().getUserAssignView();
Tab userAssignTab = new Tab(MESSAGES.securityUserAssignTab());
userAssignTab.setPane((Canvas) userAssign);
usersTabSet = new TabSet();
usersTabSet.addTab(userDetailTab);
usersTabSet.addTab(userAssignTab);
GroupDetailView groupDetail = Manager.getUsersManagementViews().getGroupDetailView();
Tab groupDetailTab = new Tab(MESSAGES.securityGroupDetailTab());
groupDetailTab.setPane((Canvas) groupDetail);
RoleSelectAssignView groupAssign = Manager.getUsersManagementViews().getGroupAssignView();
Tab groupAssignTab = new Tab(MESSAGES.securityGroupAssignTab());
groupAssignTab.setPane((Canvas) groupAssign);
TabSet groupsTabSet = new TabSet();
groupsTabSet.addTab(groupDetailTab);
groupsTabSet.addTab(groupAssignTab);
HLayout detailContainer = new HLayout();
detailContainer.setMinHeight(200);
detailContainer.setHeight("40%");
detailContainer.setLayoutTopMargin(5);
detailContainer.addMember(usersTabSet);
detailContainer.addMember(groupsTabSet);
addMember(topContainer);
addMember(detailContainer);
// Whiteboard.registerHandler(this);
/* fill the maps */
// create tabSetWithMainTabsMap
mainTabsMap.put(UsersAndGroupsHandler.MainTab.USERS, usersTab);
mainTabsMap.put(UsersAndGroupsHandler.MainTab.GROUPS, groupsTab);
mainTabsMap.put(UsersAndGroupsHandler.MainTab.ADMINS, adminsTab);
// create tabSetWithSubTabsMap
tabSetWithSubTabsMap.put(UsersAndGroupsHandler.MainTab.USERS, usersTabSet);
tabSetWithSubTabsMap.put(UsersAndGroupsHandler.MainTab.GROUPS, groupsTabSet);
// create subTabMap
Map<ObjectsTabHandler.SubTab, Tab> usersSubTabMap = new HashMap<ObjectsTabHandler.SubTab, Tab>();
usersSubTabMap.put(ObjectsTabHandler.SubTab.DETAILS, userDetailTab);
usersSubTabMap.put(ObjectsTabHandler.SubTab.ASSIGN, userAssignTab);
subTabMap.put(UsersAndGroupsHandler.MainTab.USERS, usersSubTabMap);
Map<ObjectsTabHandler.SubTab, Tab> groupsSubTabMap = new HashMap<ObjectsTabHandler.SubTab, Tab>();
groupsSubTabMap.put(ObjectsTabHandler.SubTab.DETAILS, groupDetailTab);
groupsSubTabMap.put(ObjectsTabHandler.SubTab.ASSIGN, groupAssignTab);
subTabMap.put(UsersAndGroupsHandler.MainTab.GROUPS, groupsSubTabMap);
}
@Override
public void setHandler(UsersAndGroupsHandler handler) {
this.handler = handler;
bind();
}
private void bind() {
gridTabSet.addTabSelectedHandler(new TabSelectedHandler() {
@Override
public void onTabSelected(TabSelectedEvent event) {
if (mainTabsMap.containsValue(event.getTab())) {
for (Map.Entry<UsersAndGroupsHandler.MainTab, Tab> entry : mainTabsMap.entrySet()) {
if (entry.getValue().equals(event.getTab())) {
onMainTabSelected(entry.getKey());
}
}
}
}
});
}
private void createObject(UsersAndGroupsHandler.MainTab objectTab) {
handler.onCreateObjectForTab(objectTab);
}
@Override
public void readData() {
// if (gridTabSet.getSelectedTabNumber() == 0) {
// showUsers();
// } else {
// showGroups();
// }
}
@Override
public void selectSubTab(UsersAndGroupsHandler.MainTab mainTab, ObjectsTabHandler.SubTab subTab) {
if (subTabMap.containsKey(mainTab) && tabSetWithSubTabsMap.containsKey(mainTab)) {
TabSet tabParent = tabSetWithSubTabsMap.get(mainTab);
Map<ObjectsTabHandler.SubTab, Tab> secondTabMap = subTabMap.get(mainTab);
if (secondTabMap.containsKey(subTab)) {
Tab tabChild = secondTabMap.get(subTab);
tabParent.selectTab(tabChild);
}
}
}
private void onMainTabSelected(UsersAndGroupsHandler.MainTab mainTab) {
handler.onMainTabSelected(mainTab);
// visibility of button part of our view, we manage this ourselves
showTabSetOfMainTab(mainTab);
}
private void showTabSetOfMainTab(UsersAndGroupsHandler.MainTab mainTab) {
for (Map.Entry<UsersAndGroupsHandler.MainTab, TabSet> entry : tabSetWithSubTabsMap.entrySet()) {
if (entry.getKey().equals(mainTab)) {
entry.getValue().show();
} else {
entry.getValue().hide();
}
}
}
}
| GWT-659: bugfix: set width tabs users and groups to configurable parameter
| deskmanager-gwt/src/main/java/org/geomajas/plugin/deskmanager/client/gwt/manager/security/view/UsersAndGroups.java | GWT-659: bugfix: set width tabs users and groups to configurable parameter |
|
Java | lgpl-2.1 | 62404467ed052cc18765c35a4d5c682bfc9ddd45 | 0 | MenoData/Time4J | /*
* -----------------------------------------------------------------------
* Copyright © 2013-2015 Meno Hochschild, <http://www.menodata.de/>
* -----------------------------------------------------------------------
* This file (PlainTime.java) is part of project Time4J.
*
* Time4J is free software: You can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* Time4J is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Time4J. If not, see <http://www.gnu.org/licenses/>.
* -----------------------------------------------------------------------
*/
package net.time4j;
import net.time4j.base.MathUtils;
import net.time4j.base.TimeSource;
import net.time4j.base.UnixTime;
import net.time4j.base.WallTime;
import net.time4j.engine.AttributeQuery;
import net.time4j.engine.ChronoDisplay;
import net.time4j.engine.ChronoElement;
import net.time4j.engine.ChronoEntity;
import net.time4j.engine.ChronoMerger;
import net.time4j.engine.Chronology;
import net.time4j.engine.ElementRule;
import net.time4j.engine.FormattableElement;
import net.time4j.engine.Temporal;
import net.time4j.engine.TimeAxis;
import net.time4j.engine.TimePoint;
import net.time4j.engine.UnitRule;
import net.time4j.engine.ValidationElement;
import net.time4j.format.Attributes;
import net.time4j.format.CalendarType;
import net.time4j.format.ChronoFormatter;
import net.time4j.format.ChronoPattern;
import net.time4j.format.DisplayMode;
import net.time4j.format.Leniency;
import net.time4j.tz.Timezone;
import net.time4j.tz.ZonalOffset;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.DateFormat;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
* <p>Represents a plain wall time without any timezone or date component
* as defined in ISO-8601 up to nanosecond precision. </p>
*
* <p>This type also supports the special value 24:00 in its state space.
* That value means midnight at the end of day and can be both set and
* queried. </p>
*
* <p>Following elements which are declared as constants are registered by
* this class: </p>
*
* <ul>
* <li>{@link #COMPONENT}</li>
* <li>{@link #AM_PM_OF_DAY}</li>
* <li>{@link #CLOCK_HOUR_OF_AMPM}</li>
* <li>{@link #CLOCK_HOUR_OF_DAY}</li>
* <li>{@link #DIGITAL_HOUR_OF_AMPM}</li>
* <li>{@link #DIGITAL_HOUR_OF_DAY}</li>
* <li>{@link #ISO_HOUR}</li>
* <li>{@link #MINUTE_OF_HOUR}</li>
* <li>{@link #MINUTE_OF_DAY}</li>
* <li>{@link #SECOND_OF_MINUTE}</li>
* <li>{@link #SECOND_OF_DAY}</li>
* <li>{@link #MILLI_OF_SECOND}</li>
* <li>{@link #MICRO_OF_SECOND}</li>
* <li>{@link #NANO_OF_SECOND}</li>
* <li>{@link #MILLI_OF_DAY}</li>
* <li>{@link #MICRO_OF_DAY}</li>
* <li>{@link #NANO_OF_DAY}</li>
* <li>{@link #PRECISION}</li>
* <li>{@link #DECIMAL_HOUR}</li>
* <li>{@link #DECIMAL_MINUTE}</li>
* <li>{@link #DECIMAL_SECOND}</li>
* </ul>
*
* @author Meno Hochschild
* @doctags.concurrency <immutable>
*/
/*[deutsch]
* <p>Repräsentiert eine reine Uhrzeit ohne Zeitzonen- oder Datumsteil
* nach dem ISO-8601-Standard in maximal Nanosekundengenauigkeit. </p>
*
* <p>Diese Klasse unterstützt auch den Spezialwert T24:00 in ihrem
* Zustandsraum, während die Klasse {@code PlainTimestamp} den Wert
* lediglich in der Instanzerzeugung, aber nicht in der Manipulation von
* Daten akzeptiert. </p>
*
* <p>Registriert sind folgende als Konstanten deklarierte Elemente: </p>
*
* <ul>
* <li>{@link #COMPONENT}</li>
* <li>{@link #AM_PM_OF_DAY}</li>
* <li>{@link #CLOCK_HOUR_OF_AMPM}</li>
* <li>{@link #CLOCK_HOUR_OF_DAY}</li>
* <li>{@link #DIGITAL_HOUR_OF_AMPM}</li>
* <li>{@link #DIGITAL_HOUR_OF_DAY}</li>
* <li>{@link #ISO_HOUR}</li>
* <li>{@link #MINUTE_OF_HOUR}</li>
* <li>{@link #MINUTE_OF_DAY}</li>
* <li>{@link #SECOND_OF_MINUTE}</li>
* <li>{@link #SECOND_OF_DAY}</li>
* <li>{@link #MILLI_OF_SECOND}</li>
* <li>{@link #MICRO_OF_SECOND}</li>
* <li>{@link #NANO_OF_SECOND}</li>
* <li>{@link #MILLI_OF_DAY}</li>
* <li>{@link #MICRO_OF_DAY}</li>
* <li>{@link #NANO_OF_DAY}</li>
* <li>{@link #PRECISION}</li>
* <li>{@link #DECIMAL_HOUR}</li>
* <li>{@link #DECIMAL_MINUTE}</li>
* <li>{@link #DECIMAL_SECOND}</li>
* </ul>
*
* @author Meno Hochschild
* @doctags.concurrency <immutable>
*/
@CalendarType("iso8601")
public final class PlainTime
extends TimePoint<IsoTimeUnit, PlainTime>
implements WallTime, Temporal<PlainTime> {
//~ Statische Felder/Initialisierungen --------------------------------
/**
* System-Property für die Darstellung des Dezimaltrennzeichens.
*/
static final char ISO_DECIMAL_SEPARATOR = (
Boolean.getBoolean("net.time4j.format.iso.decimal.dot")
? '.'
: ',' // Empfehlung des ISO-Standards
);
private static final int MRD = 1000000000;
private static final int MIO = 1000000;
private static final int KILO = 1000;
private static final BigDecimal DECIMAL_60 = new BigDecimal(60);
private static final BigDecimal DECIMAL_3600 = new BigDecimal(3600);
private static final BigDecimal DECIMAL_MRD = new BigDecimal(MRD);
private static final BigDecimal DECIMAL_24_0 =
new BigDecimal("24");
private static final BigDecimal DECIMAL_23_9 =
new BigDecimal("23.999999999999999");
private static final BigDecimal DECIMAL_59_9 =
new BigDecimal("59.999999999999999");
private static final PlainTime[] HOURS = new PlainTime[25];
private static final long serialVersionUID = 2780881537313863339L;
static {
    // Pre-build the 25 shared full-hour instances T00:00 ... T24:00.
    // Validation is skipped (last argument false) because all arguments
    // are known to be in range here.
    for (int i = 0; i <= 24; i++) {
        HOURS[i] = new PlainTime(i, 0, 0, 0, false);
    }
}
/** Minimalwert. */
static final PlainTime MIN = HOURS[0];
/** Maximalwert. */
static final PlainTime MAX = HOURS[24];
/** Uhrzeitkomponente. */
static final ChronoElement<PlainTime> WALL_TIME = TimeElement.INSTANCE;
/**
* <p>Element with the wall time in the value range
* {@code [T00:00:00,000000000]} until {@code [T24:00:00,000000000]}
* (inclusive in the context of {@code PlainTime} else exclusive). </p>
*
* <p>Example of usage: </p>
*
* <pre>
* PlainTimestamp tsp =
* PlainTimestamp.localFormatter("uuuu-MM-dd", PatternType.CLDR)
* .withDefault(
* PlainTime.COMPONENT,
* PlainTime.midnightAtStartOfDay())
* .parse("2014-08-20");
* System.out.println(tsp); // output: 2014-08-20T00
* </pre>
*
* <p>Note: This element does not define any base unit. </p>
*
* @since 1.2
*/
/*[deutsch]
* <p>Element mit der Uhrzeit im Wertebereich {@code [T00:00:00,000000000]}
* bis {@code [T24:00:00,000000000]} (inklusive im Kontext von
* {@code PlainTime}, sonst exklusive). </p>
*
* <p>Beispiel: </p>
*
* <pre>
* PlainTimestamp tsp =
* PlainTimestamp.localFormatter("uuuu-MM-dd", PatternType.CLDR)
* .withDefault(
* PlainTime.COMPONENT,
* PlainTime.midnightAtStartOfDay())
* .parse("2014-08-20");
* System.out.println(tsp); // output: 2014-08-20T00
* </pre>
*
* <p>Hinweis: Dieses Element definiert keine Basiseinheit. </p>
*
* @since 1.2
*/
public static final WallTimeElement COMPONENT = TimeElement.INSTANCE;
/**
* <p>Element with the half day section relative to noon (ante meridiem
* or post meridiem). </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. In detail
* the mapping from hours to meridiem values looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>AM_PM_OF_DAY</td><td>AM</td><td>AM</td><td>...</td><td>AM</td>
* <td>PM</td><td>PM</td><td>...</td><td>PM</td><td>AM</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*
* <p>Example: </p>
*
* <pre>
* import static net.time4j.PlainTime.AM_PM_OF_DAY;
*
* PlainTime time = PlainTime.of(12, 45, 20);
* System.out.println(time.get(AM_PM_OF_DAY));
* // Output: PM
* </pre>
*
* <p>This element does not define a base unit. </p>
*/
/*[deutsch]
* <p>Element mit dem Tagesabschnitt relativ zur Mittagszeit (Vormittag
* oder Nachmittag). </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Im Detail
* sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>AM_PM_OF_DAY</td><td>AM</td><td>AM</td><td>...</td><td>AM</td>
* <td>PM</td><td>PM</td><td>...</td><td>PM</td><td>AM</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*
* <p>Anwendungsbeispiel: </p>
*
* <pre>
* import static net.time4j.PlainTime.AM_PM_OF_DAY;
*
* PlainTime time = PlainTime.of(12, 45, 20);
* System.out.println(time.get(AM_PM_OF_DAY));
* // Ausgabe: PM
* </pre>
*
* <p>Dieses Element definiert keine Basiseinheit. </p>
*/
@FormattableElement(format = "a")
public static final ZonalElement<Meridiem> AM_PM_OF_DAY =
AmPmElement.AM_PM_OF_DAY;
/**
* <p>Element with the hour of half day in the value range {@code 1-12}
* (dial on an analogue watch). </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>CLOCK_HOUR_OF_AMPM</td><td>12</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>1</td><td>...</td><td>11</td><td>12</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der Halbtagsstunde im Bereich {@code 1-12}
* (Ziffernblattanzeige einer analogen Uhr). </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>CLOCK_HOUR_OF_AMPM</td><td>12</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>1</td><td>...</td><td>11</td><td>12</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
@FormattableElement(format = "h")
public static final
AdjustableElement<Integer, PlainTime> CLOCK_HOUR_OF_AMPM =
IntegerTimeElement.createClockElement("CLOCK_HOUR_OF_AMPM", false);
/**
* <p>Element with the hour in the value range {@code 1-24} (analogue
* display). </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>CLOCK_HOUR_OF_DAY</td><td>24</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>13</td><td>...</td><td>23</td><td>24</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der Stunde im Bereich {@code 1-24} (analoge Anzeige). </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>CLOCK_HOUR_OF_DAY</td><td>24</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>13</td><td>...</td><td>23</td><td>24</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
@FormattableElement(format = "k")
public static final
AdjustableElement<Integer, PlainTime> CLOCK_HOUR_OF_DAY =
IntegerTimeElement.createClockElement("CLOCK_HOUR_OF_DAY", true);
/**
* <p>Element with the digital hour of half day in the value range
* {@code 0-11}. </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>DIGITAL_HOUR_OF_AMPM</td><td>0</td><td>1</td><td>...</td><td>11</td>
* <td>0</td><td>1</td><td>...</td><td>11</td><td>0</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der digitalen Halbtagsstunde im Bereich {@code 0-11}. </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>DIGITAL_HOUR_OF_AMPM</td><td>0</td><td>1</td><td>...</td><td>11</td>
* <td>0</td><td>1</td><td>...</td><td>11</td><td>0</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
@FormattableElement(format = "K")
public static final
ProportionalElement<Integer, PlainTime> DIGITAL_HOUR_OF_AMPM =
IntegerTimeElement.createTimeElement(
"DIGITAL_HOUR_OF_AMPM",
IntegerTimeElement.DIGITAL_HOUR_OF_AMPM,
0,
11,
'K');
/**
* <p>Element with the digital hour in the value range {@code 0-23}. </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>DIGITAL_HOUR_OF_DAY</td><td>0</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>13</td><td>...</td><td>23</td><td>0</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der digitalen Stunde im Bereich {@code 0-23}. </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>DIGITAL_HOUR_OF_DAY</td><td>0</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>13</td><td>...</td><td>23</td><td>0</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
@FormattableElement(format = "H")
public static final
ProportionalElement<Integer, PlainTime> DIGITAL_HOUR_OF_DAY =
IntegerTimeElement.createTimeElement(
"DIGITAL_HOUR_OF_DAY",
IntegerTimeElement.DIGITAL_HOUR_OF_DAY,
0,
23,
'H');
/**
* <p>Element with the ISO-8601-hour of day in the value range
* {@code 0-24}. </p>
*
* <p>Given a context of {@code PlainTime} with full hours, the maximum
* is {@code 24} and stands for the time 24:00 (midnight at end of day),
* else the maximum is {@code 23} in every different context. </p>
*
* @see #getHour()
*/
/*[deutsch]
* <p>Element mit der ISO-8601-Stunde im Bereich {@code 0-24}. </p>
*
* <p>Im Kontext von {@code PlainTime} mit vollen Stunden ist das Maximum
* {@code 24} und steht für die Uhrzeit T24:00, ansonsten ist das
* Maximum in jedem anderen Kontext {@code 23}. </p>
*
* @see #getHour()
*/
public static final ProportionalElement<Integer, PlainTime> ISO_HOUR =
IntegerTimeElement.createTimeElement(
"ISO_HOUR",
IntegerTimeElement.ISO_HOUR,
0,
23,
'\u0000');
/**
* <p>Element with the minute of hour in the value range {@code 0-59}. </p>
*
* @see #getMinute()
*/
/*[deutsch]
* <p>Element mit der Minute im Bereich {@code 0-59}. </p>
*
* @see #getMinute()
*/
@FormattableElement(format = "m")
public static final ProportionalElement<Integer, PlainTime> MINUTE_OF_HOUR =
IntegerTimeElement.createTimeElement(
"MINUTE_OF_HOUR",
IntegerTimeElement.MINUTE_OF_HOUR,
0,
59,
'm');
/**
* <p>Element with the minute of day in the value range {@code 0-1440}. </p>
*
* <p>Given a context of {@code PlainTime} with full minutes, the maximum
* is {@code 1440} and stands for the time 24:00 (midnight at end of day),
* else the maximum is {@code 1439} in every different context. </p>
*/
/*[deutsch]
* <p>Element mit der Minute des Tages im Bereich {@code 0-1440}. </p>
*
* <p>Im Kontext von {@code PlainTime} mit vollen Minuten ist das Maximum
* {@code 1440} und steht für die Uhrzeit T24:00, ansonsten ist das
* Maximum in jedem anderen Kontext {@code 1439}. </p>
*/
public static final ProportionalElement<Integer, PlainTime> MINUTE_OF_DAY =
IntegerTimeElement.createTimeElement(
"MINUTE_OF_DAY",
IntegerTimeElement.MINUTE_OF_DAY,
0,
1439,
'\u0000');
/**
* <p>Element with the second of minute in the value range
* {@code 0-59}. </p>
*
* <p>This element does not know any leapseconds in a local context and
* refers to a normal analogue clock. If this element is used in
* UTC-context ({@link Moment}) however then the value range is
* {@code 0-58/59/60} instead. </p>
*
* @see #getSecond()
*/
/*[deutsch]
* <p>Element mit der Sekunde im Bereich {@code 0-59}. </p>
*
* <p>Dieses Element kennt im lokalen Kontext keine UTC-Schaltsekunden und
* bezieht sich auf eine normale analoge Uhr. Wenn dieses Element im
* UTC-Kontext ({@link Moment}) verwendet wird, dann ist der Wertebereich
* stattdessen {@code 0-58/59/60}. </p>
*
* @see #getSecond()
*/
@FormattableElement(format = "s")
public static final
ProportionalElement<Integer, PlainTime> SECOND_OF_MINUTE =
IntegerTimeElement.createTimeElement(
"SECOND_OF_MINUTE",
IntegerTimeElement.SECOND_OF_MINUTE,
0,
59,
's');
/**
* <p>Element with the second of day in the value range
* {@code 0-86400}. </p>
*
* <p>Given a context of {@code PlainTime} with full seconds, the maximum
* is {@code 86400} and stands for the time 24:00 (midnight at end of day),
* else the maximum is {@code 86399} in every different context. Leapseconds
* are never counted. </p>
*/
/*[deutsch]
* <p>Element mit der Sekunde des Tages im Bereich
* {@code 0-86400}. </p>
*
* <p>Im Kontext von {@code PlainTime} mit vollen Sekunden entspricht das
* Maximum {@code 86400} der Uhrzeit T24:00, in jedem anderen Kontext gilt
* {@code 86399}. UTC-Schaltsekunden werden nicht mitgezählt. </p>
*/
public static final
ProportionalElement<Integer, PlainTime> SECOND_OF_DAY =
IntegerTimeElement.createTimeElement(
"SECOND_OF_DAY",
IntegerTimeElement.SECOND_OF_DAY,
0,
86399,
'\u0000');
/**
* <p>Element with the millisecond in the value range {@code 0-999}. </p>
*/
/*[deutsch]
* <p>Element mit der Millisekunde im Bereich {@code 0-999}. </p>
*/
public static final
ProportionalElement<Integer, PlainTime> MILLI_OF_SECOND =
IntegerTimeElement.createTimeElement(
"MILLI_OF_SECOND",
IntegerTimeElement.MILLI_OF_SECOND,
0,
999,
'\u0000');
/**
* <p>Element with the microsecond in the value range {@code 0-999999}. </p>
*/
/*[deutsch]
* <p>Element mit der Mikrosekunde im Bereich {@code 0-999999}. </p>
*/
public static final
ProportionalElement<Integer, PlainTime> MICRO_OF_SECOND =
IntegerTimeElement.createTimeElement(
"MICRO_OF_SECOND",
IntegerTimeElement.MICRO_OF_SECOND,
0,
999999,
'\u0000');
/**
* <p>Element with the nanosecond in the value range
* {@code 0-999999999}. </p>
*/
/*[deutsch]
* <p>Element mit der Nanosekunde im Bereich {@code 0-999999999}. </p>
*/
@FormattableElement(format = "S")
public static final
ProportionalElement<Integer, PlainTime> NANO_OF_SECOND =
IntegerTimeElement.createTimeElement(
"NANO_OF_SECOND",
IntegerTimeElement.NANO_OF_SECOND,
0,
999999999,
'S');
/**
* <p>Element with the day time in milliseconds in the value range
* {@code 0-86400000}. </p>
*
* <p>Given a context of {@code PlainTime} with full milliseconds, the
* maximum is {@code 86400000} and stands for the time 24:00 (midnight at
* end of day), else the maximum is {@code 86399999} in every different
* context. Leapseconds are never counted. </p>
*/
/*[deutsch]
* <p>Element mit der Tageszeit in Millisekunden im
* Bereich {@code 0-86400000}. </p>
*
* <p>Im Kontext von {@code PlainTime} mit vollen Millisekunden ist das
* Maximum {@code 86400000} (entsprechend der Uhrzeit T24:00), in jedem
* anderen Kontext ist das Maximum der Wert {@code 86399999}.
* UTC-Schaltsekunden werden nicht mitgezählt. </p>
*/
@FormattableElement(format = "A")
public static final
ProportionalElement<Integer, PlainTime> MILLI_OF_DAY =
IntegerTimeElement.createTimeElement(
"MILLI_OF_DAY",
IntegerTimeElement.MILLI_OF_DAY,
0,
86399999,
'A');
/**
* <p>Element with the day time in microseconds in the value range
* {@code 0-86400000000}. </p>
*
* <p>Given a context of {@code PlainTime} with full microseconds, the
* maximum is {@code 86400000000} and stands for the time 24:00 (midnight
* at end of day), else the maximum is {@code 86399999999} in every
* different context. Leapseconds are never counted. </p>
*/
/*[deutsch]
* <p>Element mit der Tageszeit in Mikrosekunden im
* Bereich {@code 0-86400000000}. </p>
*
* <p>Im Kontext von {@code PlainTime} mit vollen Mikrosekunden ist das
* Maximum {@code 86400000000} (entsprechend der Uhrzeit T24:00), in jedem
* anderen Kontext ist das Maximum der Wert {@code 86399999999}.
* UTC-Schaltsekunden werden nicht mitgezählt. </p>
*/
public static final
ProportionalElement<Long, PlainTime> MICRO_OF_DAY =
LongElement.create("MICRO_OF_DAY", 0L, 86399999999L);
/**
* <p>Element with the day time in nanoseconds in the value range
* {@code 0-86400000000000}. </p>
*
 * <p>Given any context of {@code PlainTime}, the maximum is always
 * {@code 86400000000000} and stands for the time 24:00 (midnight
 * at end of day); in every other context the maximum is
 * {@code 86399999999999}. Leapseconds are never counted. </p>
*
* <p>Example: </p>
*
* <pre>
* import static net.time4j.ClockUnit.HOURS;
* import static net.time4j.PlainTime.NANO_OF_DAY;
*
* PlainTime time =
* PlainTime.midnightAtStartOfDay().plus(6, HOURS); // T06:00
* System.out.println(
* time.get(NANO_OF_DAY.ratio())
 *             .multiply(BigDecimal.valueOf(100)).stripTrailingZeros()
* + "% of day are over.");
* // Output: 25% of day are over.
* </pre>
*/
/*[deutsch]
* <p>Element mit der Tageszeit in Nanosekunden im
* Bereich {@code 0-86400000000000}. </p>
*
* <p>Im Kontext von {@code PlainTime} ist das Maximum stets
* {@code 86400000000000} (entsprechend der Uhrzeit T24:00), in jedem
* anderen Kontext ist das Maximum der Wert {@code 86399999999999}.
* UTC-Schaltsekunden werden nicht mitgezählt. </p>
*
* <p>Beispiel: </p>
*
* <pre>
* import static net.time4j.ClockUnit.HOURS;
* import static net.time4j.PlainTime.NANO_OF_DAY;
*
* PlainTime time =
* PlainTime.midnightAtStartOfDay().plus(6, HOURS); // T06:00
* System.out.println(
* time.get(NANO_OF_DAY.ratio())
 *             .multiply(BigDecimal.valueOf(100)).stripTrailingZeros()
* + "% of day are over.");
* // Ausgabe: 25% of day are over.
* </pre>
*/
public static final
ProportionalElement<Long, PlainTime> NANO_OF_DAY =
LongElement.create("NANO_OF_DAY", 0L, 86399999999999L);
/**
* <p>Decimal hour in the value range {@code 0.0} inclusive until
* {@code 24.0} exclusive (inclusive in {@code PlainTime}). </p>
*
* <p>This element does not define any base unit. </p>
*/
/*[deutsch]
* <p>Dezimal-Stunde im Wertebereich {@code 0.0} inklusive bis
* {@code 24.0} exklusive (inklusive in {@code PlainTime}). </p>
*
* <p>Dieses Element definiert keine Basiseinheit. </p>
*/
public static final ZonalElement<BigDecimal> DECIMAL_HOUR =
new DecimalTimeElement("DECIMAL_HOUR", DECIMAL_23_9);
/**
* <p>Decimal minute in the value range {@code 0.0} inclusive until
* {@code 60.0} exclusive. </p>
*
* <p>This element does not define any base unit. </p>
*/
/*[deutsch]
* <p>Dezimal-Minute im Wertebereich {@code 0.0} inklusive bis
* {@code 60.0} exklusive. </p>
*
* <p>Dieses Element definiert keine Basiseinheit. </p>
*/
public static final ZonalElement<BigDecimal> DECIMAL_MINUTE =
new DecimalTimeElement("DECIMAL_MINUTE", DECIMAL_59_9);
/**
* <p>Decimal second in the value range {@code 0.0} inclusive until
* {@code 60.0} exclusive. </p>
*
* <p>This element does not define any base unit. </p>
*/
/*[deutsch]
* <p>Dezimal-Sekunde im Wertebereich {@code 0.0} inklusive bis
* {@code 60.0} exklusive. </p>
*
* <p>Dieses Element definiert keine Basiseinheit. </p>
*/
public static final ZonalElement<BigDecimal> DECIMAL_SECOND =
new DecimalTimeElement("DECIMAL_SECOND", DECIMAL_59_9);
/**
* <p>Defines the precision as the smallest non-zero time element and
* truncates time parts of higher precision if necessary. </p>
*
* <p>Setting higher precisions than available is without any effect.
* But setting lower precisions can truncate data however. Examples: </p>
*
* <pre>
* // reading of precision -------------------------------------
* PlainTime time = PlainTime.of(12, 26, 52, 987654000);
* System.out.println(time.get(PRECISION)); // Output: MICROS
*
* // setting of precision -------------------------------------
* PlainTime time = PlainTime.of(12, 26, 52, 987654000);
* System.out.println(time.with(PRECISION, ClockUnit.MILLIS));
* // Output: T12:26:52,987
* </pre>
*
* <p>This element does not define any base unit. </p>
*/
/*[deutsch]
* <p>Definiert die Genauigkeit als das kleinste von {@code 0} verschiedene
* Uhrzeitelement und schneidet bei Bedarf zu genaue Zeitanteile ab. </p>
*
* <p>Beim Setzen der Genauigkeit ist zu beachten, daß eine
* höhere Genauigkeit wirkungslos ist. Das Setzen einer kleineren
* Genauigkeit hingegen schneidet Daten ab. Beispiele: </p>
*
* <pre>
* // Lesen der Genauigkeit ------------------------------------
* PlainTime time = PlainTime.of(12, 26, 52, 987654000);
* System.out.println(time.get(PRECISION)); // Ausgabe: MICROS
*
* // Setzen der Genauigkeit -----------------------------------
* PlainTime time = PlainTime.of(12, 26, 52, 987654000);
* System.out.println(time.with(PRECISION, ClockUnit.MILLIS));
* // Ausgabe: T12:26:52,987
* </pre>
*
* <p>Dieses Element definiert keine Basiseinheit. </p>
*/
public static final ChronoElement<ClockUnit> PRECISION =
PrecisionElement.PRECISION;
// Dient der Serialisierungsunterstützung.
private static final Map<String, Object> ELEMENTS;
static {
    // Builds the immutable name-to-element lookup map; per the field
    // comment above ("Dient der Serialisierungsunterstützung") this
    // serves serialization support for resolving elements by name.
    Map<String, Object> constants = new HashMap<String, Object>();
    fill(constants, WALL_TIME);
    fill(constants, AM_PM_OF_DAY);
    fill(constants, CLOCK_HOUR_OF_AMPM);
    fill(constants, CLOCK_HOUR_OF_DAY);
    fill(constants, DIGITAL_HOUR_OF_AMPM);
    fill(constants, DIGITAL_HOUR_OF_DAY);
    fill(constants, ISO_HOUR);
    fill(constants, MINUTE_OF_HOUR);
    fill(constants, MINUTE_OF_DAY);
    fill(constants, SECOND_OF_MINUTE);
    fill(constants, SECOND_OF_DAY);
    fill(constants, MILLI_OF_SECOND);
    fill(constants, MICRO_OF_SECOND);
    fill(constants, NANO_OF_SECOND);
    fill(constants, MILLI_OF_DAY);
    fill(constants, MICRO_OF_DAY);
    fill(constants, NANO_OF_DAY);
    fill(constants, DECIMAL_HOUR);
    fill(constants, DECIMAL_MINUTE);
    fill(constants, DECIMAL_SECOND);
    fill(constants, PRECISION);
    ELEMENTS = Collections.unmodifiableMap(constants);
}
private static final ElementRule<PlainTime, BigDecimal> H_DECIMAL_RULE =
new BigDecimalElementRule(DECIMAL_HOUR, DECIMAL_24_0);
private static final ElementRule<PlainTime, BigDecimal> M_DECIMAL_RULE =
new BigDecimalElementRule(DECIMAL_MINUTE, DECIMAL_59_9);
private static final ElementRule<PlainTime, BigDecimal> S_DECIMAL_RULE =
new BigDecimalElementRule(DECIMAL_SECOND, DECIMAL_59_9);
private static final TimeAxis<IsoTimeUnit, PlainTime> ENGINE;
static {
    // Chronology setup: registers every time element with its rule and,
    // where one exists, its base unit on the PlainTime axis.
    // Note: the upper bounds of the day-spanning elements here include
    // the T24:00 endpoint (e.g. MINUTE_OF_DAY up to 1440, SECOND_OF_DAY
    // up to 86400), matching the element Javadoc which states that the
    // maximum stands for midnight at the end of day in this context.
    TimeAxis.Builder<IsoTimeUnit, PlainTime> builder =
        TimeAxis.Builder.setUp(
            IsoTimeUnit.class,
            PlainTime.class,
            new Merger(),
            PlainTime.MIN,
            PlainTime.MAX)
        .appendElement(
            WALL_TIME,
            new TimeRule())
        .appendElement(
            AM_PM_OF_DAY,
            new MeridiemRule())
        .appendElement(
            CLOCK_HOUR_OF_AMPM,
            new IntegerElementRule(CLOCK_HOUR_OF_AMPM, 1, 12),
            ClockUnit.HOURS)
        .appendElement(
            CLOCK_HOUR_OF_DAY,
            new IntegerElementRule(CLOCK_HOUR_OF_DAY, 1, 24),
            ClockUnit.HOURS)
        .appendElement(
            DIGITAL_HOUR_OF_AMPM,
            new IntegerElementRule(DIGITAL_HOUR_OF_AMPM, 0, 11),
            ClockUnit.HOURS)
        .appendElement(
            DIGITAL_HOUR_OF_DAY,
            new IntegerElementRule(DIGITAL_HOUR_OF_DAY, 0, 23),
            ClockUnit.HOURS)
        .appendElement(
            ISO_HOUR,
            new IntegerElementRule(ISO_HOUR, 0, 24),
            ClockUnit.HOURS)
        .appendElement(
            MINUTE_OF_HOUR,
            new IntegerElementRule(MINUTE_OF_HOUR, 0, 59),
            ClockUnit.MINUTES)
        .appendElement(
            MINUTE_OF_DAY,
            new IntegerElementRule(MINUTE_OF_DAY, 0, 1440),
            ClockUnit.MINUTES)
        .appendElement(
            SECOND_OF_MINUTE,
            new IntegerElementRule(SECOND_OF_MINUTE, 0, 59),
            ClockUnit.SECONDS)
        .appendElement(
            SECOND_OF_DAY,
            new IntegerElementRule(SECOND_OF_DAY, 0, 86400),
            ClockUnit.SECONDS)
        .appendElement(
            MILLI_OF_SECOND,
            new IntegerElementRule(MILLI_OF_SECOND, 0, 999),
            ClockUnit.MILLIS)
        .appendElement(
            MICRO_OF_SECOND,
            new IntegerElementRule(MICRO_OF_SECOND, 0, 999999),
            ClockUnit.MICROS)
        .appendElement(
            NANO_OF_SECOND,
            new IntegerElementRule(NANO_OF_SECOND, 0, 999999999),
            ClockUnit.NANOS)
        .appendElement(
            MILLI_OF_DAY,
            new IntegerElementRule(MILLI_OF_DAY, 0, 86400000),
            ClockUnit.MILLIS)
        .appendElement(
            MICRO_OF_DAY,
            new LongElementRule(MICRO_OF_DAY, 0, 86400000000L),
            ClockUnit.MICROS)
        .appendElement(
            NANO_OF_DAY,
            new LongElementRule(NANO_OF_DAY, 0, 86400000000000L),
            ClockUnit.NANOS)
        .appendElement(
            DECIMAL_HOUR,
            H_DECIMAL_RULE)
        .appendElement(
            DECIMAL_MINUTE,
            M_DECIMAL_RULE)
        .appendElement(
            DECIMAL_SECOND,
            S_DECIMAL_RULE)
        .appendElement(
            PRECISION,
            new PrecisionRule());
    registerUnits(builder);
    ENGINE = builder.build();
}
//~ Instanzvariablen --------------------------------------------------
private transient final byte hour;
private transient final byte minute;
private transient final byte second;
private transient final int nano;
//~ Konstruktoren -----------------------------------------------------
/**
 * <p>Internal constructor. </p>
 *
 * <p>If {@code validating} is {@code false}, the caller guarantees that
 * all arguments are already in range (used for the cached full-hour
 * instances). </p>
 *
 * @param   hour        hour in the range {@code 0-24}
 * @param   minute      minute in the range {@code 0-59}
 * @param   second      second in the range {@code 0-59}
 * @param   nanosecond  nanosecond in the range {@code 0-999999999}
 * @param   validating  range-check the arguments?
 * @throws  IllegalArgumentException if validating and any argument is
 *          out of range or T24:00 is combined with non-zero subparts
 */
private PlainTime(
    int hour,
    int minute,
    int second,
    int nanosecond,
    boolean validating
) {
    super();
    if (validating) {
        checkHour(hour);
        checkMinute(minute);
        checkSecond(second);
        checkNano(nanosecond);
        // the special value T24:00 must not carry any sub-hour parts
        if ((hour == 24) && (minute != 0 || second != 0 || nanosecond != 0)) {
            throw new IllegalArgumentException("T24:00:00 exceeded.");
        }
    }
    this.hour = (byte) hour;
    this.minute = (byte) minute;
    this.second = (byte) second;
    this.nano = nanosecond;
}
//~ Methoden ----------------------------------------------------------
/**
 * <p>Yields the hour of day, possibly {@code 24} for midnight at the
 * end of day. </p>
 *
 * @return  hour in the range {@code 0-24}
 */
@Override
public int getHour() {
    return hour;
}
/**
 * <p>Yields the minute of hour. </p>
 *
 * @return  minute in the range {@code 0-59}
 */
@Override
public int getMinute() {
    return minute;
}
/**
 * <p>Yields the second of minute. </p>
 *
 * @return  second in the range {@code 0-59}
 */
@Override
public int getSecond() {
    return second;
}
/**
 * <p>Yields the nanosecond of second. </p>
 *
 * @return  nanosecond in the range {@code 0-999999999}
 */
@Override
public int getNanosecond() {
    return nano;
}
/**
 * <p>Yields midnight at the start of the day. </p>
 *
 * @return  midnight at the start of day T00:00
 * @see     #midnightAtEndOfDay()
 */
/*[deutsch]
 * <p>Liefert Mitternacht zu Beginn des Tages. </p>
 *
 * @return  midnight at the start of day T00:00
 * @see     #midnightAtEndOfDay()
 */
public static PlainTime midnightAtStartOfDay() {
    return MIN; // cached constant T00:00
}
/**
 * <p>Yields midnight at the end of the day, that is midnight at
 * the start of the following day. </p>
 *
 * @return  midnight at the end of day T24:00
 * @see     #midnightAtStartOfDay()
 */
/*[deutsch]
 * <p>Liefert Mitternacht zum Ende des Tages, das ist Mitternacht zum
 * Start des Folgetags. </p>
 *
 * @return  midnight at the end of day T24:00
 * @see     #midnightAtStartOfDay()
 */
public static PlainTime midnightAtEndOfDay() {
    return MAX; // cached constant T24:00
}
/**
 * <p>Creates a wall time as full hour. </p>
 *
 * @param   hour    iso-hour of day in the range {@code 0-24}
 * @return  cached full hour
 * @throws  IllegalArgumentException if given hour is out of range
 */
/*[deutsch]
 * <p>Erzeugt eine neue Uhrzeit als volle Stunde. </p>
 *
 * @param   hour    iso-hour of day in the range {@code 0-24}
 * @return  cached full hour
 * @throws  IllegalArgumentException if given hour is out of range
 */
public static PlainTime of(int hour) {
    checkHour(hour); // rejects values outside 0-24
    return HOURS[hour]; // shared cached instance, no allocation
}
/**
 * <p>Creates a wall time with hour and minute. </p>
 *
 * @param   hour        hour of day in the range {@code 0-23} or
 *                      {@code 24} if the given minute equals to {@code 0}
 * @param   minute      minute in the range {@code 0-59}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if any argument is out of range
 */
/*[deutsch]
 * <p>Erzeugt eine neue Uhrzeit mit Stunde und Minute. </p>
 *
 * @param   hour        hour of day in the range {@code 0-23} or
 *                      {@code 24} if the given minute equals to {@code 0}
 * @param   minute      minute in the range {@code 0-59}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if any argument is out of range
 */
public static PlainTime of(
    int hour,
    int minute
) {
    return (minute == 0)
        ? PlainTime.of(hour) // reuse cached full-hour instance
        : new PlainTime(hour, minute, 0, 0, true);
}
/**
 * <p>Creates a wall time with hour, minute and second. </p>
 *
 * @param   hour        hour in the range {@code 0-23} or {@code 24}
 *                      if the other arguments are equal to {@code 0}
 * @param   minute      minute in the range {@code 0-59}
 * @param   second      second in the range {@code 0-59}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if any argument is out of range
 */
/*[deutsch]
 * <p>Erzeugt eine neue Uhrzeit mit Stunde, Minute und Sekunde. </p>
 *
 * @param   hour        hour in the range {@code 0-23} or {@code 24}
 *                      if the other arguments are equal to {@code 0}
 * @param   minute      minute in the range {@code 0-59}
 * @param   second      second in the range {@code 0-59}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if any argument is out of range
 */
public static PlainTime of(
    int hour,
    int minute,
    int second
) {
    if ((minute == 0) && (second == 0)) {
        return PlainTime.of(hour); // reuse cached full-hour instance
    }
    return new PlainTime(hour, minute, second, 0, true);
}
/**
 * <p>Creates a wall time with hour, minute, second and nanosecond. </p>
 *
 * @param   hour        hour in the range {@code 0-23} or {@code 24}
 *                      if the other arguments are equal to {@code 0}
 * @param   minute      minute in the range {@code 0-59}
 * @param   second      second in the range {@code 0-59}
 * @param   nanosecond  nanosecond in the range {@code 0-999,999,999}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if any argument is out of range
 * @see     #of(int)
 * @see     #of(int, int)
 * @see     #of(int, int, int)
 * @see     #NANO_OF_SECOND
 */
/*[deutsch]
 * <p>Erzeugt eine neue Uhrzeit mit Stunde, Minute, Sekunde und
 * Nanosekunde. </p>
 *
 * @param   hour        hour in the range {@code 0-23} or {@code 24}
 *                      if the other arguments are equal to {@code 0}
 * @param   minute      minute in the range {@code 0-59}
 * @param   second      second in the range {@code 0-59}
 * @param   nanosecond  nanosecond in the range {@code 0-999,999,999}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if any argument is out of range
 * @see     #of(int)
 * @see     #of(int, int)
 * @see     #of(int, int, int)
 * @see     #NANO_OF_SECOND
 */
public static PlainTime of(
    int hour,
    int minute,
    int second,
    int nanosecond
) {
    // delegates to the internal 5-arg factory with validation enabled
    return PlainTime.of(hour, minute, second, nanosecond, true);
}
/**
 * <p>Creates a wall time by given decimal hour. </p>
 *
 * @param   decimal    decimal hour of day in the range {@code [0.0-24.0]}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if the argument is out of range
 * @see     #DECIMAL_HOUR
 */
/*[deutsch]
 * <p>Erzeugt eine neue Uhrzeit auf Basis der angegebenen
 * Dezimalstunde. </p>
 *
 * @param   decimal    decimal hour of day in the range {@code [0.0-24.0]}
 * @return  new or cached wall time
 * @throws  IllegalArgumentException if the argument is out of range
 * @see     #DECIMAL_HOUR
 */
public static PlainTime of(BigDecimal decimal) {
    // The rule for the decimal-hour element builds the instance itself,
    // hence a null context suffices here.
    // NOTE(review): range validation is assumed to happen inside the rule
    // because the documented IllegalArgumentException is not thrown here.
    return H_DECIMAL_RULE.withValue(null, decimal, false);
}
/**
 * <p>Common conversion method. </p>
 *
 * @param   time    ISO-time
 * @return  PlainTime
 */
/*[deutsch]
 * <p>Allgemeine Konversionsmethode. </p>
 *
 * @param   time    ISO-time
 * @return  PlainTime
 */
public static PlainTime from(WallTime time) {
    // short-circuit: already the target type
    if (time instanceof PlainTime) {
        return (PlainTime) time;
    }
    // a timestamp carries its wall time component directly
    if (time instanceof PlainTimestamp) {
        return ((PlainTimestamp) time).getWallTime();
    }
    // generic fallback: rebuild from the four time components
    return PlainTime.of(
        time.getHour(),
        time.getMinute(),
        time.getSecond(),
        time.getNanosecond());
}
/**
 * <p>Rolls this time by the given duration (as amount and unit) and
 * also counts possible day overflow. </p>
 *
 * @param   amount      amount to be added (maybe negative)
 * @param   unit        time unit
 * @return  result of rolling including possible day overflow
 * @see     #plus(long, Object) plus(long, IsoTimeUnit)
 */
/*[deutsch]
 * <p>Rollt die angegebene Dauer mit Betrag und Einheit zu dieser Uhrzeit
 * auf und zählt dabei auch tageweise Überläufe. </p>
 *
 * @param   amount      amount to be added (maybe negative)
 * @param   unit        time unit
 * @return  result of rolling including possible day overflow
 * @see     #plus(long, Object) plus(long, IsoTimeUnit)
 */
public DayCycles roll(
    long amount,
    ClockUnit unit
) {
    // delegates to the unit-rule variant which reports day overflow
    // instead of silently wrapping around midnight
    return ClockUnitRule.addToWithOverflow(this, amount, unit);
}
/**
 * <p>Creates a new formatter which uses the given pattern in the
 * default locale for formatting and parsing plain times. </p>
 *
 * <p>Note: The formatter can be adjusted to other locales however. </p>
 *
 * @param   formatPattern   format definition as pattern
 * @param   patternType     pattern dialect
 * @return  format object for formatting {@code PlainTime}-objects
 *          using system locale
 * @throws  IllegalArgumentException if resolving of pattern fails
 * @see     PatternType
 * @see     ChronoFormatter#with(Locale)
 */
/*[deutsch]
 * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Musters
 * in der Standard-Sprach- und Ländereinstellung. </p>
 *
 * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
 *
 * @param   formatPattern   format definition as pattern
 * @param   patternType     pattern dialect
 * @return  format object for formatting {@code PlainTime}-objects
 *          using system locale
 * @throws  IllegalArgumentException if resolving of pattern fails
 * @see     PatternType
 * @see     ChronoFormatter#with(Locale)
 */
public static ChronoFormatter<PlainTime> localFormatter(
    String formatPattern,
    ChronoPattern patternType
) {
    // built with the current system locale; callers may re-adjust
    // the result via ChronoFormatter.with(Locale)
    return ChronoFormatter
        .setUp(PlainTime.class, Locale.getDefault())
        .addPattern(formatPattern, patternType)
        .build();
}
/**
 * <p>Creates a new formatter which uses the given display mode in the
 * default locale for formatting and parsing plain times. </p>
 *
 * <p>Note: The formatter can be adjusted to other locales however. </p>
 *
 * @param   mode    formatting style
 * @return  format object for formatting {@code PlainTime}-objects
 *          using system locale
 * @throws  IllegalStateException if format pattern cannot be retrieved
 * @see     ChronoFormatter#with(Locale)
 */
/*[deutsch]
 * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Stils
 * in der Standard-Sprach- und Ländereinstellung. </p>
 *
 * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
 *
 * @param   mode    formatting style
 * @return  format object for formatting {@code PlainTime}-objects
 *          using system locale
 * @throws  IllegalStateException if format pattern cannot be retrieved
 * @see     ChronoFormatter#with(Locale)
 */
public static ChronoFormatter<PlainTime> localFormatter(DisplayMode mode) {
    int jdkStyle = PatternType.getFormatStyle(mode);
    DateFormat jdkFormat = DateFormat.getTimeInstance(jdkStyle);
    // a plain time has no zone, so zone symbols are stripped from the pattern
    String timePattern = removeZones(PatternType.getFormatPattern(jdkFormat));
    return ChronoFormatter
        .setUp(PlainTime.class, Locale.getDefault())
        .addPattern(timePattern, PatternType.SIMPLE_DATE_FORMAT)
        .build();
}
/**
 * <p>Creates a new formatter which uses the given pattern and locale
 * for formatting and parsing plain times. </p>
 *
 * <p>Note: The formatter can be adjusted to other locales however. </p>
 *
 * @param   formatPattern   format definition as pattern
 * @param   patternType     pattern dialect
 * @param   locale          locale setting
 * @return  format object for formatting {@code PlainTime}-objects
 *          using given locale
 * @throws  IllegalArgumentException if resolving of pattern fails
 * @see     PatternType
 * @see     #localFormatter(String,ChronoPattern)
 */
/*[deutsch]
 * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Musters
 * in der angegebenen Sprach- und Ländereinstellung. </p>
 *
 * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
 *
 * @param   formatPattern   format definition as pattern
 * @param   patternType     pattern dialect
 * @param   locale          locale setting
 * @return  format object for formatting {@code PlainTime}-objects
 *          using given locale
 * @throws  IllegalArgumentException if resolving of pattern fails
 * @see     PatternType
 * @see     #localFormatter(String,ChronoPattern)
 */
public static ChronoFormatter<PlainTime> formatter(
    String formatPattern,
    ChronoPattern patternType,
    Locale locale
) {
    // same as localFormatter(String, ChronoPattern) but with explicit locale
    return ChronoFormatter
        .setUp(PlainTime.class, locale)
        .addPattern(formatPattern, patternType)
        .build();
}
/**
 * <p>Creates a new formatter which uses the given display mode and locale
 * for formatting and parsing plain times. </p>
 *
 * <p>Note: The formatter can be adjusted to other locales however. </p>
 *
 * @param   mode        formatting style
 * @param   locale      locale setting
 * @return  format object for formatting {@code PlainTime}-objects
 *          using given locale
 * @throws  IllegalStateException if format pattern cannot be retrieved
 * @see     #localFormatter(DisplayMode)
 */
/*[deutsch]
 * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Stils
 * und in der angegebenen Sprach- und Ländereinstellung. </p>
 *
 * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
 *
 * @param   mode        formatting style
 * @param   locale      locale setting
 * @return  format object for formatting {@code PlainTime}-objects
 *          using given locale
 * @throws  IllegalStateException if format pattern cannot be retrieved
 * @see     #localFormatter(DisplayMode)
 */
public static ChronoFormatter<PlainTime> formatter(
    DisplayMode mode,
    Locale locale
) {
    int jdkStyle = PatternType.getFormatStyle(mode);
    DateFormat jdkFormat = DateFormat.getTimeInstance(jdkStyle, locale);
    // a plain time has no zone, so zone symbols are stripped from the pattern
    String timePattern = removeZones(PatternType.getFormatPattern(jdkFormat));
    return ChronoFormatter
        .setUp(PlainTime.class, locale)
        .addPattern(timePattern, PatternType.SIMPLE_DATE_FORMAT)
        .build();
}
/**
 * <p>Compares the full state, that is hour, minute, second and nanosecond
 * of this instance and given argument. </p>
 */
/*[deutsch]
 * <p>Vergleicht alle Zeitzustandsattribute, nämlich Stunde, Minute,
 * Sekunde und Nanosekunde. </p>
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof PlainTime)) {
        return false;
    }
    PlainTime other = (PlainTime) obj;
    return (this.hour == other.hour)
        && (this.minute == other.minute)
        && (this.second == other.second)
        && (this.nano == other.nano);
}
/**
 * <p>Based on all time state attributes; consistent with
 * {@link #equals(Object)}. </p>
 */
/*[deutsch]
 * <p>Basiert auf allen Zeitzustandsattributen. </p>
 */
@Override
public int hashCode() {
    // hour (0-24) and 60 * minute never overlap, so distinct
    // second-precision times yield distinct hashes; the nano part
    // is only mixed in, collisions are acceptable here
    return (
        this.hour
        + 60 * this.minute
        + 3600 * this.second
        + 37 * this.nano);
}
/**
 * <p>Queries if this wall time is temporally earlier than the given one. </p>
 *
 * @param   time    wall time to be compared with
 * @return  {@code true} if this instance is earlier else {@code false}
 */
@Override
public boolean isBefore(PlainTime time) {
    return (this.compareTo(time) < 0);
}
/**
 * <p>Queries if this wall time is temporally later than the given one. </p>
 *
 * @param   time    wall time to be compared with
 * @return  {@code true} if this instance is later else {@code false}
 */
@Override
public boolean isAfter(PlainTime time) {
    return (this.compareTo(time) > 0);
}
/**
 * <p>Queries if this wall time denotes the same position on the
 * timeline as the given one. </p>
 *
 * @param   time    wall time to be compared with
 * @return  {@code true} if both times are equal in hour, minute,
 *          second and nanosecond
 */
@Override
public boolean isSimultaneous(PlainTime time) {
    return (this.compareTo(time) == 0);
}
/**
 * <p>Is this instance at midnight, either at start or at end of day? </p>
 *
 * @return  boolean
 */
/*[deutsch]
 * <p>Liegt Mitternacht vor (am Anfang oder am Ende eines Tages)? </p>
 *
 * @return  boolean
 */
public boolean isMidnight() {
    // midnight requires minute, second and nano to be zero ...
    if (!this.isFullHour()) {
        return false;
    }
    // ... and the hour to be either 0 (start of day) or 24 (end of day)
    return ((this.hour % 24) == 0);
}
/**
 * <p>Defines a natural order which is solely based on the timeline
 * order. </p>
 *
 * <p>The natural order is consistent with {@code equals()}. </p>
 *
 * @see     #isBefore(PlainTime)
 * @see     #isAfter(PlainTime)
 */
/*[deutsch]
 * <p>Definiert eine natürliche Ordnung, die auf der zeitlichen
 * Position basiert. </p>
 *
 * <p>Der Vergleich ist konsistent mit {@code equals()}. </p>
 *
 * @see     #isBefore(PlainTime)
 * @see     #isAfter(PlainTime)
 */
@Override
public int compareTo(PlainTime time) {
    // compare components from coarse to fine until a difference shows up;
    // plain int subtraction cannot overflow for these small value ranges
    int delta = this.hour - time.hour;
    if (delta == 0) {
        delta = this.minute - time.minute;
    }
    if (delta == 0) {
        delta = this.second - time.second;
    }
    if (delta == 0) {
        delta = this.nano - time.nano;
    }
    // normalize to the signum
    if (delta < 0) {
        return -1;
    } else if (delta > 0) {
        return 1;
    } else {
        return 0;
    }
}
/**
 * <p>Dependent on the precision of this instance, this method yields a
 * canonical representation in one of following formats (CLDR-syntax): </p>
 *
 * <ul>
 *  <li>'T'HH</li>
 *  <li>'T'HH:mm</li>
 *  <li>'T'HH:mm:ss</li>
 *  <li>'T'HH:mm:ss,SSS</li>
 *  <li>'T'HH:mm:ss,SSSSSS</li>
 *  <li>'T'HH:mm:ss,SSSSSSSSS</li>
 * </ul>
 *
 * <p>The fraction part will be preceded by a comma as recommended by ISO
 * unless the system property &quot;net.time4j.format.iso.decimal.dot&quot;
 * was set to &quot;true&quot;. </p>
 *
 * @return  canonical ISO-8601-formatted string
 */
/*[deutsch]
 * <p>Liefert je nach Genauigkeit einen String in einem der folgenden
 * Formate (CLDR-Syntax): </p>
 *
 * <ul>
 *  <li>'T'HH</li>
 *  <li>'T'HH:mm</li>
 *  <li>'T'HH:mm:ss</li>
 *  <li>'T'HH:mm:ss,SSS</li>
 *  <li>'T'HH:mm:ss,SSSSSS</li>
 *  <li>'T'HH:mm:ss,SSSSSSSSS</li>
 * </ul>
 *
 * <p>Vor dem Sekundenbruchteil erscheint im Standardfall das Komma, es sei
 * denn, die System-Property &quot;net.time4j.format.iso.decimal.dot&quot;
 * wurde auf &quot;true&quot; gesetzt. </p>
 *
 * @return  canonical ISO-8601-formatted string
 */
@Override
public String toString() {
    // longest case "T23:59:59,123456789" => 19 chars
    StringBuilder sb = new StringBuilder(19);
    sb.append('T');
    append2Digits(this.hour, sb);
    if ((this.minute | this.second | this.nano) != 0) {
        sb.append(':');
        append2Digits(this.minute, sb);
        if ((this.second | this.nano) != 0) {
            sb.append(':');
            append2Digits(this.second, sb);
            if (this.nano != 0) {
                sb.append(ISO_DECIMAL_SEPARATOR);
                String num = Integer.toString(this.nano);
                int len;
                // print 3, 6 or 9 fraction digits depending on the
                // finest non-zero unit (milli, micro or nano)
                if ((this.nano % MIO) == 0) {
                    len = 3;
                } else if ((this.nano % KILO) == 0) {
                    len = 6;
                } else {
                    len = 9;
                }
                // left-pad the nano value to nine digits with zeros
                for (int i = num.length(); i < 9; i++) {
                    sb.append('0');
                }
                // FIX: the count of digits still to print must account for
                // the zeros already written, i.e. len - (9 - num.length()).
                // The previous Math.min(len, num.length()) printed extra
                // trailing digits for values with fewer than 9 digits
                // (e.g. 1 ms => ",00100" instead of the canonical ",001").
                // The expression is always >= 1 because len = 3 implies
                // num.length() >= 7 and len = 6 implies num.length() >= 4.
                for (
                    int i = 0, n = len + num.length() - 9;
                    i < n;
                    i++
                ) {
                    sb.append(num.charAt(i));
                }
            }
        }
    }
    return sb.toString();
}
/**
 * <p>Provides a static access to the associated time axis respective
 * chronology which contains the chronological rules. </p>
 *
 * @return  chronological system as time axis (never {@code null})
 */
/*[deutsch]
 * <p>Liefert die zugehörige Zeitachse, die alle notwendigen
 * chronologischen Regeln enthält. </p>
 *
 * @return  chronological system as time axis (never {@code null})
 */
public static TimeAxis<IsoTimeUnit, PlainTime> axis() {
    // ENGINE is the singleton time axis, presumably built in a static
    // initializer earlier in this file - TODO confirm
    return ENGINE;
}
/**
 * <p>Obtains the underlying time axis (same instance as {@link #axis()}). </p>
 *
 * @doctags.exclude
 */
@Override
protected TimeAxis<IsoTimeUnit, PlainTime> getChronology() {
    return ENGINE;
}
/**
 * <p>Identifies this instance as its own chronological context. </p>
 *
 * @doctags.exclude
 */
@Override
protected PlainTime getContext() {
    return this;
}
/**
 * <p>Creates a new wall time matching the given absolute time. </p>
 *
 * @param   ut      unix time in seconds
 * @param   offset  shift of local time relative to UTC
 * @return  new or cached wall time
 */
static PlainTime from(
    UnixTime ut,
    ZonalOffset offset
) {
    long localSeconds = ut.getPosixTime() + offset.getIntegralAmount();
    int localNanos = ut.getNanosecond() + offset.getFractionalAmount();
    // normalize the nano part into the range [0, MRD) and carry
    // the over-/underflow into the seconds
    if (localNanos < 0) {
        localNanos += MRD;
        localSeconds--;
    } else if (localNanos >= MRD) {
        localNanos -= MRD;
        localSeconds++;
    }
    // 86400 = seconds per day; floorModulo also handles negative seconds
    int secondsOfDay = MathUtils.floorModulo(localSeconds, 86400);
    int second = secondsOfDay % 60;
    int minutesOfDay = secondsOfDay / 60;
    int minute = minutesOfDay % 60;
    int hour = minutesOfDay / 60;
    return PlainTime.of(
        hour,
        minute,
        second,
        localNanos
    );
}
/**
 * <p>Serves for serialization support. </p>
 *
 * @param   elementName     name of element
 * @return  found element or {@code null}
 */
// optional
static Object lookupElement(String elementName) {
    // ELEMENTS maps element names to their instances, see fill()
    return ELEMENTS.get(elementName);
}
/**
 * <p>Needed by SQL-TIMESTAMP conversions. </p>
 *
 * @param   millisOfDay     milliseconds of day
 * @return  new instance
 */
static PlainTime createFromMillis(int millisOfDay) {
    return PlainTime.createFromMillis(millisOfDay, 0);
}
/**
 * <p>Called by the {@code ratio()}-function of the given element. </p>
 *
 * <p>If {@code true}, the effective maximum of the element is reduced
 * by one because this time cannot represent the ceiling value exactly
 * (see {@code IntegerElementRule.getMaximum()}). </p>
 *
 * @param   element     reference time element
 * @return  {@code true} if element maximum is reduced else {@code false}
 */
boolean hasReducedRange(ChronoElement<?> element) {
    return (
        ((element == MILLI_OF_DAY) && ((this.nano % MIO) != 0))
        || ((element == ISO_HOUR) && !this.isFullHour())
        || ((element == MINUTE_OF_DAY) && !this.isFullMinute())
        || ((element == SECOND_OF_DAY) && (this.nano != 0))
        || ((element == MICRO_OF_DAY) && ((this.nano % KILO) != 0))
    );
}
// central private factory: times on a full hour are shared instances
private static PlainTime of(
    int hour,
    int minute,
    int second,
    int nanosecond,
    boolean validating
) {
    if ((minute == 0) && (second == 0) && (nanosecond == 0)) {
        // of(int) validates the hour; HOURS is the pre-built cache
        return (validating ? PlainTime.of(hour) : HOURS[hour]);
    }
    return new PlainTime(hour, minute, second, nanosecond, validating);
}
// registers the given element under its name (builds the lookup map
// used by lookupElement())
private static void fill(
    Map<String, Object> map,
    ChronoElement<?> element
) {
    map.put(element.name(), element);
}
// appends the value as exactly two digits, zero-padded on the left
private static void append2Digits(
    int element,
    StringBuilder sb
) {
    if (element >= 10) {
        sb.append(element);
    } else {
        sb.append('0');
        sb.append(element);
    }
}
// validates the hour; 24 is allowed to represent the end of day
private static void checkHour(long hour) {
    if ((hour >= 0) && (hour <= 24)) {
        return;
    }
    throw new IllegalArgumentException(
        "HOUR_OF_DAY out of range: " + hour);
}
// validates the minute (0-59)
private static void checkMinute(long minute) {
    if ((minute >= 0) && (minute <= 59)) {
        return;
    }
    throw new IllegalArgumentException(
        "MINUTE_OF_HOUR out of range: " + minute);
}
// validates the second (0-59, no leap seconds on this axis)
private static void checkSecond(long second) {
    if ((second >= 0) && (second <= 59)) {
        return;
    }
    throw new IllegalArgumentException(
        "SECOND_OF_MINUTE out of range: " + second);
}
// validates the nanosecond (0 inclusive to MRD exclusive)
private static void checkNano(int nano) {
    if ((nano >= 0) && (nano < MRD)) {
        return;
    }
    throw new IllegalArgumentException(
        "NANO_OF_SECOND out of range: " + nano);
}
// builds a time from milliseconds of day plus a microsecond remainder
private static PlainTime createFromMillis(
    int millisOfDay,
    int micros
) {
    // NOTE(review): micros is assumed to be a sub-millisecond remainder
    // in nanosecond resolution - confirm against callers
    int nanosecond = (millisOfDay % KILO) * MIO + micros;
    int secondsOfDay = millisOfDay / KILO;
    int second = secondsOfDay % 60;
    int minutesOfDay = secondsOfDay / 60;
    int minute = minutesOfDay % 60;
    int hour = minutesOfDay / 60;
    return PlainTime.of(hour, minute, second, nanosecond);
}
// builds a time from microseconds of day plus a nanosecond remainder
private static PlainTime createFromMicros(
    long microsOfDay,
    int nanos
) {
    int nanosecond = ((int) (microsOfDay % MIO)) * KILO + nanos;
    int secondsOfDay = (int) (microsOfDay / MIO);
    int second = secondsOfDay % 60;
    int minutesOfDay = secondsOfDay / 60;
    int minute = minutesOfDay % 60;
    int hour = minutesOfDay / 60;
    return PlainTime.of(hour, minute, second, nanosecond);
}
// builds a time from total nanoseconds of day
private static PlainTime createFromNanos(long nanosOfDay) {
    int nanosecond = (int) (nanosOfDay % MRD);
    int secondsOfDay = (int) (nanosOfDay / MRD);
    int second = secondsOfDay % 60;
    int minutesOfDay = secondsOfDay / 60;
    int minute = minutesOfDay % 60;
    int hour = minutesOfDay / 60;
    return PlainTime.of(hour, minute, second, nanosecond);
}
// total nanoseconds elapsed since T00:00 (long to avoid int overflow)
private long getNanoOfDay() {
    return (
        this.nano
        + this.second * 1L * MRD
        + this.minute * 60L * MRD
        + this.hour * 3600L * MRD
    );
}
// true if minute, second and nano are all zero
private boolean isFullHour() {
    return ((this.minute | this.second | this.nano) == 0);
}
// true if second and nano are both zero
private boolean isFullMinute() {
    return ((this.second | this.nano) == 0);
}
// registers every clock unit with its rule on the time axis builder;
// all clock units are declared mutually convertible
private static void registerUnits(TimeAxis.Builder<IsoTimeUnit, PlainTime> builder) {
    Set<ClockUnit> convertibles = EnumSet.allOf(ClockUnit.class);
    for (ClockUnit unit : ClockUnit.values()) {
        builder.appendUnit(
            unit,
            new ClockUnitRule(unit),
            unit.getLength(),
            convertibles);
    }
}
// floor division: rounds towards negative infinity (unlike Java's "/")
private static long floorDiv(
    long value,
    long divisor
) {
    return (
        (value >= 0)
        ? (value / divisor)
        : (((value + 1) / divisor) - 1));
}
// floor modulo: result carries the sign of the divisor
private static long floorMod(
    long value,
    long divisor
) {
    return (value - divisor * floorDiv(value, divisor));
}
// Strips time zone symbols ('z') from JDK format patterns at the end,
// in the middle and at the front, because a plain wall time carries
// no zone information.
// NOTE(review): assumes the pattern is non-empty - true for patterns
// obtained from DateFormat.getTimeInstance(), the only visible callers.
private static String removeZones(String pattern) {
    // removes any zone symbol that is preceded by a single space
    String s = pattern.replace(" z", "");
    // removes a trailing run of zone symbols plus separating whitespace
    if (s.charAt(s.length() - 1) == 'z') {
        for (int i = s.length() - 1; i > 0; i--) {
            if (s.charAt(i - 1) != 'z') {
                s = s.substring(0, i).trim();
                break;
            }
        }
    }
    // removes a leading run of zone symbols plus separating whitespace
    if (s.charAt(0) == 'z') {
        for (int i = 1; i < s.length(); i++) {
            if (s.charAt(i) != 'z') {
                s = s.substring(i).trim();
                break;
            }
        }
    }
    return s;
}
/**
 * @serialData  Uses <a href="../../serialized-form.html#net.time4j.SPX">
 *              a dedicated serialization form</a> as proxy. The layout
 *              is bit-compressed. The first byte contains within the
 *              four most significant bits the type id {@code 2}. Then
 *              the data bytes for hour, minute, second and nanosecond
 *              follow (in last case int instead of byte). Is the precision
 *              limited to seconds, minutes or hours then the last non-zero
 *              byte will be bit-inverted by the operator (~), and the
 *              following bytes will be left out. The hour byte however
 *              is always written.
 *
 *              Schematic algorithm:
 *
 * <pre>
 *  out.writeByte(2 &lt;&lt; 4);
 *
 *  if (time.nano == 0) {
 *      if (time.second == 0) {
 *          if (time.minute == 0) {
 *              out.writeByte(~time.hour);
 *          } else {
 *              out.writeByte(time.hour);
 *              out.writeByte(~time.minute);
 *          }
 *      } else {
 *          out.writeByte(time.hour);
 *          out.writeByte(time.minute);
 *          out.writeByte(~time.second);
 *      }
 *  } else {
 *      out.writeByte(time.hour);
 *      out.writeByte(time.minute);
 *      out.writeByte(time.second);
 *      out.writeInt(time.nano);
 *  }
 * </pre>
 *
 * @return  replacement object in serialization graph
 */
private Object writeReplace() {
    // all (de)serialization is routed through the SPX proxy class
    return new SPX(this, SPX.TIME_TYPE);
}
/**
 * @serialData  Blocks because a serialization proxy is required.
 * @param       in      object input stream
 * @throws      InvalidObjectException (always)
 */
private void readObject(ObjectInputStream in)
    throws IOException {
    // direct deserialization is forbidden; see writeReplace()
    throw new InvalidObjectException("Serialization proxy required.");
}
//~ Innere Klassen ----------------------------------------------------
/**
 * <p>Unit rule implementing addition/subtraction and distance
 * calculation for a single {@code ClockUnit} on the time axis. </p>
 */
private static class ClockUnitRule
    implements UnitRule<PlainTime> {
    //~ Instanzvariablen ----------------------------------------------
    // the clock unit this rule is responsible for
    private final ClockUnit unit;
    //~ Konstruktoren -------------------------------------------------
    private ClockUnitRule(ClockUnit unit) {
        super();
        this.unit = unit;
    }
    //~ Methoden ------------------------------------------------------
    @Override
    public PlainTime addTo(
        PlainTime context,
        long amount
    ) {
        if (amount == 0) {
            return context;
        }
        return doAdd(PlainTime.class, this.unit, context, amount);
    }
    @Override
    public long between(
        PlainTime start,
        PlainTime end
    ) {
        // difference in nanoseconds, scaled down to this rule's unit
        long delta = (end.getNanoOfDay() - start.getNanoOfDay());
        long factor;
        switch (this.unit) {
            case HOURS:
                factor = MRD * 3600L;
                break;
            case MINUTES:
                factor = MRD * 60L;
                break;
            case SECONDS:
                factor = MRD;
                break;
            case MILLIS:
                factor = MIO;
                break;
            case MICROS:
                factor = KILO;
                break;
            case NANOS:
                factor = 1;
                break;
            default:
                throw new UnsupportedOperationException(this.unit.name());
        }
        // truncation towards zero => only complete units are counted
        return delta / factor;
    }
    // like addTo(), but day overflow is reported instead of wrapped away
    private static DayCycles addToWithOverflow(
        PlainTime context,
        long amount,
        ClockUnit unit
    ) {
        if ((amount == 0) && (context.hour < 24)) {
            return new DayCycles(0, context);
        }
        return doAdd(DayCycles.class, unit, context, amount);
    }
    // common addition algorithm; returnType decides whether the result is
    // a wrapped PlainTime or a DayCycles object carrying the day overflow
    private static <R> R doAdd(
        Class<R> returnType,
        ClockUnit unit,
        PlainTime context,
        long amount
    ) {
        long hours;
        long minutes;
        long seconds;
        long nanos;
        int minute = context.minute;
        int second = context.second;
        int fraction = context.nano;
        switch (unit) {
            case HOURS:
                hours = MathUtils.safeAdd(context.hour, amount);
                break;
            case MINUTES:
                // carry minute overflow into the hours
                minutes = MathUtils.safeAdd(context.minute, amount);
                hours =
                    MathUtils.safeAdd(
                        context.hour,
                        MathUtils.floorDivide(minutes, 60));
                minute = MathUtils.floorModulo(minutes, 60);
                break;
            case SECONDS:
                // carry second overflow into minutes, then into hours
                seconds = MathUtils.safeAdd(context.second, amount);
                minutes =
                    MathUtils.safeAdd(
                        context.minute,
                        MathUtils.floorDivide(seconds, 60));
                hours =
                    MathUtils.safeAdd(
                        context.hour,
                        MathUtils.floorDivide(minutes, 60));
                minute = MathUtils.floorModulo(minutes, 60);
                second = MathUtils.floorModulo(seconds, 60);
                break;
            case MILLIS:
                // converted to the nanosecond case
                return doAdd(
                    returnType,
                    ClockUnit.NANOS,
                    context,
                    MathUtils.safeMultiply(amount, MIO));
            case MICROS:
                // converted to the nanosecond case
                return doAdd(
                    returnType,
                    ClockUnit.NANOS,
                    context,
                    MathUtils.safeMultiply(amount, KILO));
            case NANOS:
                // full carry chain: nanos -> seconds -> minutes -> hours
                nanos =
                    MathUtils.safeAdd(context.nano, amount);
                seconds =
                    MathUtils.safeAdd(
                        context.second,
                        MathUtils.floorDivide(nanos, MRD));
                minutes =
                    MathUtils.safeAdd(
                        context.minute,
                        MathUtils.floorDivide(seconds, 60));
                hours =
                    MathUtils.safeAdd(
                        context.hour,
                        MathUtils.floorDivide(minutes, 60));
                minute = MathUtils.floorModulo(minutes, 60);
                second = MathUtils.floorModulo(seconds, 60);
                fraction = MathUtils.floorModulo(nanos, MRD);
                break;
            default:
                throw new UnsupportedOperationException(unit.name());
        }
        int hour = MathUtils.floorModulo(hours, 24);
        PlainTime time;
        if ((hour | minute | second | fraction) == 0) { // midnight
            // forward addition onto midnight yields T24:00 (end of day),
            // but only for the plain-time variant without overflow count
            time = (
                ((amount > 0) && (returnType == PlainTime.class))
                ? PlainTime.MAX
                : PlainTime.MIN);
        } else {
            time = PlainTime.of(hour, minute, second, fraction);
        }
        if (returnType == PlainTime.class) {
            return returnType.cast(time);
        } else {
            // count of complete day cycles crossed by the addition
            long cycles = MathUtils.floorDivide(hours, 24);
            return returnType.cast(new DayCycles(cycles, time));
        }
    }
}
/**
 * <p>Element rule for the self-referencing wall-time element:
 * the value of a time is the time itself. </p>
 */
private static class TimeRule
    implements ElementRule<PlainTime, PlainTime> {
    //~ Methoden ------------------------------------------------------
    @Override
    public PlainTime getValue(PlainTime context) {
        return context;
    }
    @Override
    public PlainTime withValue(
        PlainTime context,
        PlainTime value,
        boolean lenient
    ) {
        if (value == null) {
            throw new NullPointerException("Missing time value.");
        }
        // the new value completely replaces the old context
        return value;
    }
    @Override
    public boolean isValid(
        PlainTime context,
        PlainTime value
    ) {
        // any non-null time is a valid value
        return (value != null);
    }
    @Override
    public PlainTime getMinimum(PlainTime context) {
        return PlainTime.MIN;
    }
    @Override
    public PlainTime getMaximum(PlainTime context) {
        return PlainTime.MAX;
    }
    @Override
    public ChronoElement<?> getChildAtFloor(PlainTime context) {
        return null;
    }
    @Override
    public ChronoElement<?> getChildAtCeiling(PlainTime context) {
        return null;
    }
}
/**
 * <p>Element rule for the precision element: reads the finest non-zero
 * unit of a time and truncates a time to a requested precision. </p>
 */
private static class PrecisionRule
    implements ElementRule<PlainTime, ClockUnit> {
    //~ Methoden ------------------------------------------------------
    @Override
    public ClockUnit getValue(PlainTime context) {
        // the finest component with a non-zero value defines the precision
        if (context.nano != 0) {
            if ((context.nano % MIO) == 0) {
                return ClockUnit.MILLIS;
            } else if ((context.nano % KILO) == 0) {
                return ClockUnit.MICROS;
            } else {
                return ClockUnit.NANOS;
            }
        } else if (context.second != 0) {
            return ClockUnit.SECONDS;
        } else if (context.minute != 0) {
            return ClockUnit.MINUTES;
        } else {
            return ClockUnit.HOURS;
        }
    }
    @Override
    public PlainTime withValue(
        PlainTime context,
        ClockUnit value,
        boolean lenient
    ) {
        int ordinal = value.ordinal();
        if (ordinal >= this.getValue(context).ordinal()) {
            return context; // no truncation necessary
        }
        switch (value) {
            case HOURS:
                return PlainTime.of(context.hour);
            case MINUTES:
                return PlainTime.of(context.hour, context.minute);
            case SECONDS:
                return PlainTime.of(
                    context.hour, context.minute, context.second);
            case MILLIS:
                // truncate the fraction to full milliseconds
                return PlainTime.of(
                    context.hour,
                    context.minute,
                    context.second,
                    (context.nano / MIO) * MIO);
            case MICROS:
                // truncate the fraction to full microseconds
                return PlainTime.of(
                    context.hour,
                    context.minute,
                    context.second,
                    (context.nano / KILO) * KILO);
            case NANOS:
                return context; // program should never get here
            default:
                throw new UnsupportedOperationException(value.name());
        }
    }
    @Override
    public boolean isValid(
        PlainTime context,
        ClockUnit value
    ) {
        return (value != null);
    }
    @Override
    public ClockUnit getMinimum(PlainTime context) {
        return ClockUnit.HOURS;
    }
    @Override
    public ClockUnit getMaximum(PlainTime context) {
        return ClockUnit.NANOS;
    }
    @Override
    public ChronoElement<?> getChildAtFloor(PlainTime context) {
        return null;
    }
    @Override
    public ChronoElement<?> getChildAtCeiling(PlainTime context) {
        return null;
    }
}
/**
 * <p>Element rule for the am/pm-element. </p>
 */
private static class MeridiemRule
    implements ElementRule<PlainTime, Meridiem> {
    //~ Methoden ------------------------------------------------------
    @Override
    public Meridiem getValue(PlainTime context) {
        return Meridiem.ofHour(context.hour);
    }
    @Override
    public PlainTime withValue(
        PlainTime context,
        Meridiem value,
        boolean lenient
    ) {
        // T24:00 is treated like T00:00 when switching the meridiem
        int h = ((context.hour == 24) ? 0 : context.hour);
        if (value == null) {
            throw new NullPointerException("Missing am/pm-value.");
        } else if (value == Meridiem.AM) {
            if (h >= 12) {
                h -= 12;
            }
        } else if (value == Meridiem.PM) {
            if (h < 12) {
                h += 12;
            }
        }
        // minute, second and nano are preserved unchanged
        return PlainTime.of(
            h,
            context.minute,
            context.second,
            context.nano
        );
    }
    @Override
    public boolean isValid(
        PlainTime context,
        Meridiem value
    ) {
        return (value != null);
    }
    @Override
    public Meridiem getMinimum(PlainTime context) {
        return Meridiem.AM;
    }
    @Override
    public Meridiem getMaximum(PlainTime context) {
        return Meridiem.PM;
    }
    @Override
    public ChronoElement<?> getChildAtFloor(PlainTime context) {
        return DIGITAL_HOUR_OF_AMPM;
    }
    @Override
    public ChronoElement<?> getChildAtCeiling(PlainTime context) {
        return DIGITAL_HOUR_OF_AMPM;
    }
}
private static class IntegerElementRule
implements ElementRule<PlainTime, Integer> {
//~ Instanzvariablen ----------------------------------------------
private final ChronoElement<Integer> element;
private final int index;
private final int min;
private final int max;
//~ Konstruktoren -------------------------------------------------
IntegerElementRule(
ChronoElement<Integer> element,
int min,
int max
) {
super();
this.element = element;
if (element instanceof IntegerTimeElement) {
this.index = ((IntegerTimeElement) element).getIndex();
} else {
this.index = -1;
}
this.min = min;
this.max = max;
}
//~ Methoden ------------------------------------------------------
@Override
public Integer getValue(PlainTime context) {
int ret;
switch (this.index) {
case IntegerTimeElement.CLOCK_HOUR_OF_AMPM:
ret = (context.hour % 12);
if (ret == 0) {
ret = 12;
}
break;
case IntegerTimeElement.CLOCK_HOUR_OF_DAY:
ret = context.hour % 24;
if (ret == 0) {
ret = 24;
}
break;
case IntegerTimeElement.DIGITAL_HOUR_OF_AMPM:
ret = (context.hour % 12);
break;
case IntegerTimeElement.DIGITAL_HOUR_OF_DAY:
ret = context.hour % 24;
break;
case IntegerTimeElement.ISO_HOUR:
ret = context.hour;
break;
case IntegerTimeElement.MINUTE_OF_HOUR:
ret = context.minute;
break;
case IntegerTimeElement.MINUTE_OF_DAY:
ret = context.hour * 60 + context.minute;
break;
case IntegerTimeElement.SECOND_OF_MINUTE:
ret = context.second;
break;
case IntegerTimeElement.SECOND_OF_DAY:
ret =
context.hour * 3600
+ context.minute * 60
+ context.second;
break;
case IntegerTimeElement.MILLI_OF_SECOND:
ret = (context.nano / MIO);
break;
case IntegerTimeElement.MICRO_OF_SECOND:
ret = (context.nano / KILO);
break;
case IntegerTimeElement.NANO_OF_SECOND:
ret = context.nano;
break;
case IntegerTimeElement.MILLI_OF_DAY:
ret = (int) (context.getNanoOfDay() / MIO);
break;
default:
throw new UnsupportedOperationException(
this.element.name());
}
return Integer.valueOf(ret);
}
@Override
public PlainTime withValue(
PlainTime context,
Integer value,
boolean lenient
) {
if (value == null) {
throw new NullPointerException("Missing element value.");
} else if (lenient) {
return this.withValueInLenientMode(context, value.intValue());
} else if (!this.isValid(context, value)) {
throw new IllegalArgumentException(
"Value out of range: " + value);
}
int h = context.hour;
int m = context.minute;
int s = context.second;
int f = context.nano;
int v = value.intValue();
switch (this.index) {
case IntegerTimeElement.CLOCK_HOUR_OF_AMPM:
v = ((v == 12) ? 0 : v);
h = (isAM(context) ? v : (v + 12));
break;
case IntegerTimeElement.CLOCK_HOUR_OF_DAY:
h = ((v == 24) ? 0 : v);
break;
case IntegerTimeElement.DIGITAL_HOUR_OF_AMPM:
h = (isAM(context) ? v : (v + 12));
break;
case IntegerTimeElement.DIGITAL_HOUR_OF_DAY:
h = v;
break;
case IntegerTimeElement.ISO_HOUR:
h = v;
break;
case IntegerTimeElement.MINUTE_OF_HOUR:
m = v;
break;
case IntegerTimeElement.MINUTE_OF_DAY:
h = v / 60;
m = v % 60;
break;
case IntegerTimeElement.SECOND_OF_MINUTE:
s = v;
break;
case IntegerTimeElement.SECOND_OF_DAY:
h = v / 3600;
int remainder = v % 3600;
m = remainder / 60;
s = remainder % 60;
break;
case IntegerTimeElement.MILLI_OF_SECOND:
f = v * MIO + (context.nano % MIO);
break;
case IntegerTimeElement.MICRO_OF_SECOND:
f = v * KILO + (context.nano % KILO);
break;
case IntegerTimeElement.NANO_OF_SECOND:
f = v;
break;
case IntegerTimeElement.MILLI_OF_DAY:
return PlainTime.createFromMillis(v, context.nano % MIO);
default:
throw new UnsupportedOperationException(
this.element.name());
}
return PlainTime.of(h, m, s, f);
}
@Override
public boolean isValid(
PlainTime context,
Integer value
) {
if (value == null) {
return false;
}
int v = value.intValue();
if ((v < this.min) || (v > this.max)) {
return false;
}
if (v == this.max) {
switch (this.index) {
case IntegerTimeElement.ISO_HOUR:
return context.isFullHour();
case IntegerTimeElement.MINUTE_OF_DAY:
return context.isFullMinute();
case IntegerTimeElement.SECOND_OF_DAY:
return (context.nano == 0);
case IntegerTimeElement.MILLI_OF_DAY:
return ((context.nano % MIO) == 0);
default:
// no-op
}
}
if (context.hour == 24) {
switch (this.index) {
case IntegerTimeElement.MINUTE_OF_HOUR:
case IntegerTimeElement.SECOND_OF_MINUTE:
case IntegerTimeElement.MILLI_OF_SECOND:
case IntegerTimeElement.MICRO_OF_SECOND:
case IntegerTimeElement.NANO_OF_SECOND:
return (v == 0);
default:
// no-op
}
}
return true;
}
@Override
public Integer getMinimum(PlainTime context) {
return Integer.valueOf(this.min);
}
@Override
public Integer getMaximum(PlainTime context) {
if (context.hour == 24) {
switch (this.index) {
case IntegerTimeElement.MINUTE_OF_HOUR:
case IntegerTimeElement.SECOND_OF_MINUTE:
case IntegerTimeElement.MILLI_OF_SECOND:
case IntegerTimeElement.MICRO_OF_SECOND:
case IntegerTimeElement.NANO_OF_SECOND:
return Integer.valueOf(0);
default:
// no-op
}
}
if (context.hasReducedRange(this.element)) {
return Integer.valueOf(this.max - 1);
}
return Integer.valueOf(this.max);
}
@Override
public ChronoElement<?> getChildAtFloor(PlainTime context) {
return this.getChild(context);
}
@Override
public ChronoElement<?> getChildAtCeiling(PlainTime context) {
return this.getChild(context);
}
private ChronoElement<?> getChild(PlainTime context) {
switch (this.index) {
case IntegerTimeElement.CLOCK_HOUR_OF_AMPM:
case IntegerTimeElement.CLOCK_HOUR_OF_DAY:
case IntegerTimeElement.DIGITAL_HOUR_OF_AMPM:
case IntegerTimeElement.DIGITAL_HOUR_OF_DAY:
case IntegerTimeElement.ISO_HOUR:
return MINUTE_OF_HOUR;
case IntegerTimeElement.MINUTE_OF_HOUR:
case IntegerTimeElement.MINUTE_OF_DAY:
return SECOND_OF_MINUTE;
case IntegerTimeElement.SECOND_OF_MINUTE:
case IntegerTimeElement.SECOND_OF_DAY:
return NANO_OF_SECOND;
default:
return null;
}
}
/**
 * Applies the given value in lenient mode: for field elements the
 * difference to the current value is added via safe delta arithmetic
 * (so overflow carries into coarser units); for day-based elements the
 * value rolls over modulo one day, where an exact positive multiple of
 * a day yields T24:00 and a non-positive one yields T00:00.
 */
private PlainTime withValueInLenientMode(
    PlainTime context,
    int value
) {
    if (
        (this.element == ISO_HOUR)
        || (this.element == DIGITAL_HOUR_OF_DAY)
        || (this.element == DIGITAL_HOUR_OF_AMPM)
    ) {
        // hour-like elements: add the delta in clock hours
        return context.plus(
            MathUtils.safeSubtract(value, context.get(this.element)),
            ClockUnit.HOURS);
    } else if (this.element == MINUTE_OF_HOUR) {
        return context.plus(
            MathUtils.safeSubtract(value, context.minute),
            ClockUnit.MINUTES);
    } else if (this.element == SECOND_OF_MINUTE) {
        return context.plus(
            MathUtils.safeSubtract(value, context.second),
            ClockUnit.SECONDS);
    } else if (this.element == MILLI_OF_SECOND) {
        return context.plus(
            MathUtils.safeSubtract(
                value, context.get(MILLI_OF_SECOND)),
            ClockUnit.MILLIS);
    } else if (this.element == MICRO_OF_SECOND) {
        return context.plus(
            MathUtils.safeSubtract(
                value, context.get(MICRO_OF_SECOND)),
            ClockUnit.MICROS);
    } else if (this.element == NANO_OF_SECOND) {
        return context.plus(
            MathUtils.safeSubtract(value, context.nano),
            ClockUnit.NANOS);
    } else if (this.element == MILLI_OF_DAY) {
        // day-based: roll over modulo one day, preserve sub-milli rest
        int remainder1 = MathUtils.floorModulo(value, 86400 * KILO);
        int remainder2 = context.nano % MIO;
        if ((remainder1 == 0) && (remainder2 == 0)) {
            // exact multiple of a day: positive => T24:00, else T00:00
            return (value > 0) ? PlainTime.MAX : PlainTime.MIN;
        } else {
            return PlainTime.createFromMillis(remainder1, remainder2);
        }
    } else if (this.element == MINUTE_OF_DAY) {
        int remainder = MathUtils.floorModulo(value, 1440);
        if ((remainder == 0) && context.isFullMinute()) {
            return (value > 0) ? PlainTime.MAX : PlainTime.MIN;
        } else {
            // delegate to strict assignment with the normalized value
            return this.withValue(
                context, Integer.valueOf(remainder), false);
        }
    } else if (this.element == SECOND_OF_DAY) {
        int remainder = MathUtils.floorModulo(value, 86400);
        if ((remainder == 0) && (context.nano == 0)) {
            return (value > 0) ? PlainTime.MAX : PlainTime.MIN;
        } else {
            return this.withValue(
                context, Integer.valueOf(remainder), false);
        }
    } else {
        throw new UnsupportedOperationException(this.element.name());
    }
}
/**
 * Queries whether the given time is ante meridiem; the special value
 * T24:00 counts as AM just like T00:00.
 */
private static boolean isAM(PlainTime context) {
    int h = context.hour;
    return ((h < 12) || (h == 24));
}
}
/**
 * Element rule for the long-valued day elements MICRO_OF_DAY and
 * NANO_OF_DAY.
 */
private static class LongElementRule
    implements ElementRule<PlainTime, Long> {

    //~ Instance variables --------------------------------------------

    private final ChronoElement<Long> element;
    private final long min;
    private final long max;

    //~ Constructors --------------------------------------------------

    LongElementRule(
        ChronoElement<Long> element,
        long min,
        long max
    ) {
        super();
        this.element = element;
        this.min = min;
        this.max = max;
    }

    //~ Methods -------------------------------------------------------

    @Override
    public Long getValue(PlainTime context) {
        long nanoOfDay = context.getNanoOfDay();

        // MICRO_OF_DAY truncates to microsecond precision
        return Long.valueOf(
            (this.element == MICRO_OF_DAY)
                ? (nanoOfDay / KILO)
                : nanoOfDay);
    }

    @Override
    public PlainTime withValue(
        PlainTime context,
        Long value,
        boolean lenient
    ) {
        if (value == null) {
            throw new NullPointerException("Missing element value.");
        }

        if (lenient) {
            return this.withValueInLenientMode(context, value.longValue());
        }

        if (!this.isValid(context, value)) {
            throw new IllegalArgumentException(
                "Value out of range: " + value);
        }

        long amount = value.longValue();

        return (
            (this.element == MICRO_OF_DAY)
                // keep a possible sub-micro rest of the context
                ? PlainTime.createFromMicros(amount, context.nano % KILO)
                : PlainTime.createFromNanos(amount));
    }

    @Override
    public boolean isValid(
        PlainTime context,
        Long value
    ) {
        if (value == null) {
            return false;
        }

        long v = value.longValue();

        if ((this.element == MICRO_OF_DAY) && (v == this.max)) {
            // the maximum (T24:00 in micros) requires no sub-micro rest
            return ((context.nano % KILO) == 0);
        }

        return ((this.min <= v) && (v <= this.max));
    }

    @Override
    public Long getMinimum(PlainTime context) {
        return Long.valueOf(this.min);
    }

    @Override
    public Long getMaximum(PlainTime context) {
        boolean reduced =
            (this.element == MICRO_OF_DAY)
            && ((context.nano % KILO) != 0);

        // with a sub-micro rest the value T24:00 is not reachable
        return Long.valueOf(reduced ? (this.max - 1) : this.max);
    }

    @Override
    public ChronoElement<?> getChildAtFloor(PlainTime context) {
        return null; // no finer registered element
    }

    @Override
    public ChronoElement<?> getChildAtCeiling(PlainTime context) {
        return null; // no finer registered element
    }

    // lenient mode: roll the raw value over modulo one day
    private PlainTime withValueInLenientMode(
        PlainTime context,
        long value
    ) {
        if (this.element == MICRO_OF_DAY) {
            long micros = floorMod(value, 86400L * MIO);
            int subMicro = context.nano % KILO;
            if ((micros == 0) && (subMicro == 0) && (value > 0)) {
                // positive exact multiple of a day => T24:00
                return PlainTime.MAX;
            }
            return PlainTime.createFromMicros(micros, subMicro);
        } else { // NANO_OF_DAY
            long nanos = floorMod(value, 86400L * MRD);
            if ((nanos == 0) && (value > 0)) {
                return PlainTime.MAX;
            }
            return PlainTime.createFromNanos(nanos);
        }
    }

}
/**
 * Element rule for the decimal time representations (the fields
 * DECIMAL_HOUR, DECIMAL_MINUTE and DECIMAL_SECOND): converts between
 * a {@code PlainTime} and a {@code BigDecimal} with up to 15
 * fractional digits.
 */
private static class BigDecimalElementRule
    implements ElementRule<PlainTime, BigDecimal> {

    //~ Instance variables --------------------------------------------

    // the decimal element this rule serves
    private final ChronoElement<BigDecimal> element;
    // inclusive decimal maximum of the element
    private final BigDecimal max;

    //~ Constructors --------------------------------------------------

    BigDecimalElementRule(
        ChronoElement<BigDecimal> element,
        BigDecimal max
    ) {
        super();
        this.element = element;
        this.max = max;
    }

    //~ Methods -------------------------------------------------------

    /**
     * Computes the decimal value of the element (e.g. 12.5 for half
     * past twelve and DECIMAL_HOUR), truncated to scale 15 with
     * trailing zeros stripped.
     */
    @Override
    public BigDecimal getValue(PlainTime context) {
        BigDecimal val;
        if (this.element == DECIMAL_HOUR) {
            if (context.equals(PlainTime.MIN)) {
                return BigDecimal.ZERO;
            } else if (context.hour == 24) {
                // T24:00 maps to the special decimal value 24
                return DECIMAL_24_0;
            }
            val =
                BigDecimal.valueOf(context.hour)
                .add(div(BigDecimal.valueOf(context.minute), DECIMAL_60))
                .add(div(BigDecimal.valueOf(context.second), DECIMAL_3600))
                .add(
                    div(
                        BigDecimal.valueOf(context.nano),
                        DECIMAL_3600.multiply(DECIMAL_MRD)));
        } else if (this.element == DECIMAL_MINUTE) {
            if (context.isFullHour()) {
                return BigDecimal.ZERO;
            }
            val =
                BigDecimal.valueOf(context.minute)
                .add(div(BigDecimal.valueOf(context.second), DECIMAL_60))
                .add(
                    div(
                        BigDecimal.valueOf(context.nano),
                        DECIMAL_60.multiply(DECIMAL_MRD)));
        } else if (this.element == DECIMAL_SECOND) {
            if (context.isFullMinute()) {
                return BigDecimal.ZERO;
            }
            val =
                BigDecimal.valueOf(context.second)
                .add(div(BigDecimal.valueOf(context.nano), DECIMAL_MRD));
        } else {
            throw new UnsupportedOperationException(this.element.name());
        }
        return val.setScale(15, RoundingMode.FLOOR).stripTrailingZeros();
    }

    /**
     * Splits the decimal value into integral hour/minute/second/nano
     * components; in lenient mode overflow rolls over into coarser
     * units (an exact positive day boundary yields T24:00).
     *
     * @throws  IllegalArgumentException in strict mode if the
     *          resulting components lie out of range
     */
    @Override
    public PlainTime withValue(
        PlainTime context,
        BigDecimal value,
        boolean lenient
    ) {
        BigDecimal bd = value;
        int h, m, s, f;
        long hv;
        if (this.element == DECIMAL_HOUR) {
            // peel off the integral hour, then minute, second, nano
            BigDecimal intH = bd.setScale(0, RoundingMode.FLOOR);
            BigDecimal fractionalM = bd.subtract(intH).multiply(DECIMAL_60);
            BigDecimal intM = fractionalM.setScale(0, RoundingMode.FLOOR);
            BigDecimal fractionalS =
                fractionalM.subtract(intM).multiply(DECIMAL_60);
            BigDecimal intS = fractionalS.setScale(0, RoundingMode.FLOOR);
            hv = intH.longValueExact();
            m = intM.intValue();
            s = intS.intValue();
            f = toNano(fractionalS.subtract(intS));
        } else if (this.element == DECIMAL_MINUTE) {
            BigDecimal totalM = bd.setScale(0, RoundingMode.FLOOR);
            BigDecimal fractionalS =
                bd.subtract(totalM).multiply(DECIMAL_60);
            BigDecimal intS = fractionalS.setScale(0, RoundingMode.FLOOR);
            s = intS.intValue();
            f = toNano(fractionalS.subtract(intS));
            long minutes = totalM.longValueExact();
            hv = context.hour;
            if (lenient) {
                // carry excess minutes into the hour
                hv += MathUtils.floorDivide(minutes, 60);
                m = MathUtils.floorModulo(minutes, 60);
            } else {
                checkMinute(minutes);
                m = (int) minutes;
            }
        } else if (this.element == DECIMAL_SECOND) {
            BigDecimal totalS = bd.setScale(0, RoundingMode.FLOOR);
            f = toNano(bd.subtract(totalS));
            long seconds = totalS.longValueExact();
            hv = context.hour;
            m = context.minute;
            if (lenient) {
                // carry excess seconds into minute and hour
                s = MathUtils.floorModulo(seconds, 60);
                long minutes = m + MathUtils.floorDivide(seconds, 60);
                hv += MathUtils.floorDivide(minutes, 60);
                m = MathUtils.floorModulo(minutes, 60);
            } else {
                checkSecond(seconds);
                s = (int) seconds;
            }
        } else {
            throw new UnsupportedOperationException(this.element.name());
        }
        if (lenient) {
            h = MathUtils.floorModulo(hv, 24);
            if ((hv > 0) && ((h | m | s | f) == 0)) {
                // rolled over to a positive exact day boundary => T24:00
                return PlainTime.MAX;
            }
        } else if (hv < 0 || hv > 24) {
            throw new IllegalArgumentException(
                "Value out of range: " + value);
        } else {
            h = (int) hv;
        }
        return PlainTime.of(h, m, s, f);
    }

    @Override
    public boolean isValid(
        PlainTime context,
        BigDecimal value
    ) {
        if (value == null) {
            return false;
        }
        if (context.hour == 24) {
            // at T24:00 only zero is acceptable for minute/second
            if (
                (this.element == DECIMAL_MINUTE)
                || (this.element == DECIMAL_SECOND)
            ) {
                return (BigDecimal.ZERO.compareTo(value) == 0);
            }
        }
        return (
            (BigDecimal.ZERO.compareTo(value) <= 0)
            && (this.max.compareTo(value) >= 0)
        );
    }

    @Override
    public BigDecimal getMinimum(PlainTime context) {
        return BigDecimal.ZERO;
    }

    @Override
    public BigDecimal getMaximum(PlainTime context) {
        if (context.hour == 24) {
            // at T24:00 minute/second cannot exceed zero
            if (
                (this.element == DECIMAL_MINUTE)
                || (this.element == DECIMAL_SECOND)
            ) {
                return BigDecimal.ZERO;
            }
        }
        return this.max;
    }

    @Override
    public ChronoElement<?> getChildAtFloor(PlainTime context) {
        return null; // never called
    }

    @Override
    public ChronoElement<?> getChildAtCeiling(PlainTime context) {
        return null; // never called
    }

    // division with scale 16, truncating towards negative infinity
    private static BigDecimal div(
        BigDecimal value,
        BigDecimal factor
    ) {
        return value.divide(factor, 16, RoundingMode.FLOOR);
    }

    // converts a fraction of a second to nanoseconds, capped at MRD - 1
    private static int toNano(BigDecimal fractionOfSecond) {
        // decimal value is almost always slightly too small => round up
        BigDecimal result =
            fractionOfSecond.movePointRight(9).setScale(
                0,
                RoundingMode.HALF_UP);
        return Math.min(MRD - 1, result.intValue());
    }

}
/**
 * Merger strategy of the time axis: obtains a {@code PlainTime}
 * either from a time source (clock) or by resolving parsed
 * chronological elements of an entity.
 */
private static class Merger
    implements ChronoMerger<PlainTime> {

    //~ Methods -------------------------------------------------------

    /**
     * Derives the current wall time from the given clock, interpreted
     * in the timezone named by the format attributes (system timezone
     * as fallback).
     */
    @Override
    public PlainTime createFrom(
        TimeSource<?> clock,
        final AttributeQuery attributes
    ) {
        Timezone zone;
        if (attributes.contains(Attributes.TIMEZONE_ID)) {
            zone = Timezone.of(attributes.get(Attributes.TIMEZONE_ID));
        } else {
            zone = Timezone.ofSystem();
        }
        final UnixTime ut = clock.currentTime();
        return PlainTime.from(ut, zone.getOffset(ut));
    }

    /**
     * Resolves parsed elements to a wall time, preferring elements
     * which occur in format patterns; returns {@code null} (after
     * flagging a validation error) if resolution fails.
     */
    @Override
    public PlainTime createFrom(
        ChronoEntity<?> entity,
        AttributeQuery attributes,
        boolean preparsing
    ) {
        if (entity instanceof UnixTime) {
            return PlainTimestamp.axis()
                .createFrom(entity, attributes, preparsing).getWallTime();
        }
        // wall time already available? -----------------------------------
        if (entity.contains(WALL_TIME)) {
            return entity.get(WALL_TIME);
        }
        // hour part ------------------------------------------------------
        if (entity.contains(DECIMAL_HOUR)) {
            return PlainTime.of(entity.get(DECIMAL_HOUR));
        }
        Leniency leniency =
            attributes.get(Attributes.LENIENCY, Leniency.SMART);
        int hour = 0;
        if (entity.contains(ISO_HOUR)) {
            hour = entity.get(ISO_HOUR).intValue();
        } else {
            Integer h = readHour(entity);
            if (h == null) {
                // no hour element present => try day-based elements
                return readSpecialCases(entity);
            }
            hour = h.intValue();
            if (
                (hour == 24)
                && !leniency.isLax()
            ) {
                flagValidationError(
                    entity,
                    "Time 24:00 not allowed, "
                    + "use lax mode or element ISO_HOUR instead.");
                return null;
            }
        }
        // minute part ----------------------------------------------------
        if (entity.contains(DECIMAL_MINUTE)) {
            return M_DECIMAL_RULE.withValue(
                PlainTime.of(hour),
                entity.get(DECIMAL_MINUTE),
                false
            );
        }
        int minute = 0;
        if (entity.contains(MINUTE_OF_HOUR)) {
            minute = entity.get(MINUTE_OF_HOUR).intValue();
        }
        // second part ----------------------------------------------------
        if (entity.contains(DECIMAL_SECOND)) {
            return S_DECIMAL_RULE.withValue(
                PlainTime.of(hour, minute),
                entity.get(DECIMAL_SECOND),
                false
            );
        }
        int second = 0;
        if (entity.contains(SECOND_OF_MINUTE)) {
            second = entity.get(SECOND_OF_MINUTE).intValue();
        }
        // nano part ------------------------------------------------------
        int nanosecond = 0;
        if (entity.contains(NANO_OF_SECOND)) {
            nanosecond = entity.get(NANO_OF_SECOND).intValue();
        } else if (entity.contains(MICRO_OF_SECOND)) {
            nanosecond = entity.get(MICRO_OF_SECOND).intValue() * KILO;
        } else if (entity.contains(MILLI_OF_SECOND)) {
            nanosecond = entity.get(MILLI_OF_SECOND).intValue() * MIO;
        }
        // result from hour, minute, second and nano ----------------------
        if (leniency.isLax()) {
            // lenient: sum up to total nanos, roll over modulo one day and
            // store any full-day overflow in DAY_OVERFLOW if accepted
            long total =
                MathUtils.safeAdd(
                    MathUtils.safeMultiply(
                        MathUtils.safeAdd(
                            MathUtils.safeAdd(
                                MathUtils.safeMultiply(hour, 3600L),
                                MathUtils.safeMultiply(minute, 60L)),
                            second
                        ),
                        MRD
                    ),
                    nanosecond
                );
            long nanoOfDay = floorMod(total, 86400L * MRD);
            long overflow = floorDiv(total, 86400L * MRD);
            if (
                (overflow != 0)
                && entity.isValid(LongElement.DAY_OVERFLOW, overflow)
            ) {
                entity.with(LongElement.DAY_OVERFLOW, overflow);
            }
            if ((nanoOfDay == 0) && (overflow > 0)) {
                return PlainTime.MAX;
            } else {
                return PlainTime.createFromNanos(nanoOfDay);
            }
        } else if (
            // FIX: the disjunction must sit INSIDE the non-negativity
            // conjunction; formerly a misplaced closing parenthesis
            // attached (hour >= 0)...(nanosecond >= 0) only to the 24:00
            // branch so that e.g. a negative minute could pass via the
            // (hour < 24) branch. Also (nanosecond <= MRD) was off by
            // one: MRD nanos equal a whole second and are not a valid
            // nano-of-second value.
            (hour >= 0)
            && (minute >= 0)
            && (second >= 0)
            && (nanosecond >= 0)
            && (
                ((hour == 24) && ((minute | second | nanosecond) == 0))
                || (
                    (hour < 24)
                    && (minute <= 59)
                    && (second <= 59)
                    && (nanosecond < MRD)))
        ) {
            return PlainTime.of(hour, minute, second, nanosecond, false);
        } else {
            flagValidationError(entity, "Time component out of range.");
            return null;
        }
    }

    /**
     * Reads the hour from digital or clock (12h/24h) elements;
     * returns {@code null} if no suitable hour element is present.
     */
    private static Integer readHour(ChronoEntity<?> entity) {
        int hour;
        if (entity.contains(DIGITAL_HOUR_OF_DAY)) {
            hour = entity.get(DIGITAL_HOUR_OF_DAY).intValue();
        } else if (entity.contains(CLOCK_HOUR_OF_DAY)) {
            hour = entity.get(CLOCK_HOUR_OF_DAY).intValue();
            if (hour == 24) {
                hour = 0; // clock hour 24 denotes the digital hour 0
            }
        } else if (entity.contains(AM_PM_OF_DAY)) {
            Meridiem ampm = entity.get(AM_PM_OF_DAY);
            if (entity.contains(DIGITAL_HOUR_OF_AMPM)) {
                int h = entity.get(DIGITAL_HOUR_OF_AMPM).intValue();
                hour = ((ampm == Meridiem.AM) ? h : h + 12);
            } else if (
                entity.contains(CLOCK_HOUR_OF_AMPM)
            ) {
                int h = entity.get(CLOCK_HOUR_OF_AMPM).intValue();
                if (h == 12) {
                    h = 0; // clock hour 12 denotes the digital hour 0
                }
                hour = ((ampm == Meridiem.AM) ? h : h + 12);
            } else {
                return null;
            }
        } else {
            return null;
        }
        return Integer.valueOf(hour);
    }

    /**
     * Resolves day-based elements (NANO_OF_DAY, MICRO_OF_DAY,
     * MILLI_OF_DAY, SECOND_OF_DAY, MINUTE_OF_DAY), combining them with
     * any finer fraction elements; returns {@code null} if none fits.
     */
    private static PlainTime readSpecialCases(ChronoEntity<?> entity) {
        if (entity.contains(NANO_OF_DAY)) { // Threeten symbol N
            long nanoOfDay = entity.get(NANO_OF_DAY).longValue();
            if ((nanoOfDay < 0) || (nanoOfDay > 86400L * MRD)) {
                flagValidationError(
                    entity,
                    "NANO_OF_DAY out of range: " + nanoOfDay);
                return null;
            }
            return PlainTime.createFromNanos(nanoOfDay);
        } else if (entity.contains(MICRO_OF_DAY)) {
            int nanos = 0;
            if (entity.contains(NANO_OF_SECOND)) {
                // keep only a possible sub-micro rest
                nanos = entity.get(NANO_OF_SECOND).intValue() % KILO;
            }
            return PlainTime.createFromMicros(
                entity.get(MICRO_OF_DAY).longValue(),
                nanos
            );
        } else if (entity.contains(MILLI_OF_DAY)) { // CLDR symbol A
            int submillis = 0;
            if (entity.contains(NANO_OF_SECOND)) {
                int nanoOfSecond = entity.get(NANO_OF_SECOND).intValue();
                if ((nanoOfSecond < 0) || (nanoOfSecond >= MRD)) {
                    flagValidationError(
                        entity,
                        "NANO_OF_SECOND out of range: " + nanoOfSecond);
                    return null;
                }
                submillis = nanoOfSecond % MIO;
            } else if (entity.contains(MICRO_OF_SECOND)) {
                int microOfSecond = entity.get(MICRO_OF_SECOND).intValue();
                if ((microOfSecond < 0) || (microOfSecond >= MIO)) {
                    flagValidationError(
                        entity,
                        "MICRO_OF_SECOND out of range: " + microOfSecond);
                    return null;
                }
                submillis = microOfSecond % KILO;
            }
            int milliOfDay = entity.get(MILLI_OF_DAY).intValue();
            if ((milliOfDay < 0) || (milliOfDay > 86400 * KILO)) {
                flagValidationError(
                    entity,
                    "MILLI_OF_DAY out of range: " + milliOfDay);
                return null;
            }
            return PlainTime.createFromMillis(milliOfDay, submillis);
        } else if (entity.contains(SECOND_OF_DAY)) {
            int nanos = 0;
            if (entity.contains(NANO_OF_SECOND)) {
                nanos = entity.get(NANO_OF_SECOND).intValue();
            } else if (entity.contains(MICRO_OF_SECOND)) {
                nanos = entity.get(MICRO_OF_SECOND).intValue() * KILO;
            } else if (entity.contains(MILLI_OF_SECOND)) {
                nanos = entity.get(MILLI_OF_SECOND).intValue() * MIO;
            }
            return PlainTime.of(0, 0, 0, nanos).with(
                SECOND_OF_DAY,
                entity.get(SECOND_OF_DAY));
        } else if (entity.contains(MINUTE_OF_DAY)) {
            int nanos = 0;
            if (entity.contains(NANO_OF_SECOND)) {
                nanos = entity.get(NANO_OF_SECOND).intValue();
            } else if (entity.contains(MICRO_OF_SECOND)) {
                nanos = entity.get(MICRO_OF_SECOND).intValue() * KILO;
            } else if (entity.contains(MILLI_OF_SECOND)) {
                nanos = entity.get(MILLI_OF_SECOND).intValue() * MIO;
            }
            int secs = 0;
            if (entity.contains(SECOND_OF_MINUTE)) {
                secs = entity.get(SECOND_OF_MINUTE).intValue();
            }
            return PlainTime.of(0, 0, secs, nanos).with(
                MINUTE_OF_DAY,
                entity.get(MINUTE_OF_DAY));
        }
        return null;
    }

    // stores the error message in the entity if it accepts the element
    private static void flagValidationError(
        ChronoEntity<?> entity,
        String message
    ) {
        if (entity.isValid(ValidationElement.ERROR_MESSAGE, message)) {
            entity.with(ValidationElement.ERROR_MESSAGE, message);
        }
    }

    @Override
    public ChronoDisplay preformat(
        PlainTime context,
        AttributeQuery attributes
    ) {
        return context; // a PlainTime displays itself
    }

    @Override
    public Chronology<?> preparser() {
        return null; // no preparsing chronology needed
    }

}
}
/*
* -----------------------------------------------------------------------
* Copyright © 2013-2015 Meno Hochschild, <http://www.menodata.de/>
* -----------------------------------------------------------------------
* This file (PlainTime.java) is part of project Time4J.
*
* Time4J is free software: You can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* Time4J is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Time4J. If not, see <http://www.gnu.org/licenses/>.
* -----------------------------------------------------------------------
*/
package net.time4j;
import net.time4j.base.MathUtils;
import net.time4j.base.TimeSource;
import net.time4j.base.UnixTime;
import net.time4j.base.WallTime;
import net.time4j.engine.AttributeQuery;
import net.time4j.engine.ChronoDisplay;
import net.time4j.engine.ChronoElement;
import net.time4j.engine.ChronoEntity;
import net.time4j.engine.ChronoMerger;
import net.time4j.engine.Chronology;
import net.time4j.engine.ElementRule;
import net.time4j.engine.FormattableElement;
import net.time4j.engine.Temporal;
import net.time4j.engine.TimeAxis;
import net.time4j.engine.TimePoint;
import net.time4j.engine.UnitRule;
import net.time4j.engine.ValidationElement;
import net.time4j.format.Attributes;
import net.time4j.format.CalendarType;
import net.time4j.format.ChronoFormatter;
import net.time4j.format.ChronoPattern;
import net.time4j.format.DisplayMode;
import net.time4j.format.Leniency;
import net.time4j.tz.Timezone;
import net.time4j.tz.ZonalOffset;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.DateFormat;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
* <p>Represents a plain wall time without any timezone or date component
* as defined in ISO-8601 up to nanosecond precision. </p>
*
* <p>This type also supports the special value 24:00 in its state space.
* That value means midnight at the end of day and can be both set and
* queried. </p>
*
* <p>Following elements which are declared as constants are registered by
* this class: </p>
*
* <ul>
* <li>{@link #COMPONENT}</li>
* <li>{@link #AM_PM_OF_DAY}</li>
* <li>{@link #CLOCK_HOUR_OF_AMPM}</li>
* <li>{@link #CLOCK_HOUR_OF_DAY}</li>
* <li>{@link #DIGITAL_HOUR_OF_AMPM}</li>
* <li>{@link #DIGITAL_HOUR_OF_DAY}</li>
* <li>{@link #ISO_HOUR}</li>
* <li>{@link #MINUTE_OF_HOUR}</li>
* <li>{@link #MINUTE_OF_DAY}</li>
* <li>{@link #SECOND_OF_MINUTE}</li>
* <li>{@link #SECOND_OF_DAY}</li>
* <li>{@link #MILLI_OF_SECOND}</li>
* <li>{@link #MICRO_OF_SECOND}</li>
* <li>{@link #NANO_OF_SECOND}</li>
* <li>{@link #MILLI_OF_DAY}</li>
* <li>{@link #MICRO_OF_DAY}</li>
* <li>{@link #NANO_OF_DAY}</li>
* <li>{@link #PRECISION}</li>
* <li>{@link #DECIMAL_HOUR}</li>
* <li>{@link #DECIMAL_MINUTE}</li>
* <li>{@link #DECIMAL_SECOND}</li>
* </ul>
*
* @author Meno Hochschild
* @doctags.concurrency <immutable>
*/
/*[deutsch]
* <p>Repräsentiert eine reine Uhrzeit ohne Zeitzonen- oder Datumsteil
* nach dem ISO-8601-Standard in maximal Nanosekundengenauigkeit. </p>
*
* <p>Diese Klasse unterstützt auch den Spezialwert T24:00 in ihrem
* Zustandsraum, während die Klasse {@code PlainTimestamp} den Wert
* lediglich in der Instanzerzeugung, aber nicht in der Manipulation von
* Daten akzeptiert. </p>
*
* <p>Registriert sind folgende als Konstanten deklarierte Elemente: </p>
*
* <ul>
* <li>{@link #COMPONENT}</li>
* <li>{@link #AM_PM_OF_DAY}</li>
* <li>{@link #CLOCK_HOUR_OF_AMPM}</li>
* <li>{@link #CLOCK_HOUR_OF_DAY}</li>
* <li>{@link #DIGITAL_HOUR_OF_AMPM}</li>
* <li>{@link #DIGITAL_HOUR_OF_DAY}</li>
* <li>{@link #ISO_HOUR}</li>
* <li>{@link #MINUTE_OF_HOUR}</li>
* <li>{@link #MINUTE_OF_DAY}</li>
* <li>{@link #SECOND_OF_MINUTE}</li>
* <li>{@link #SECOND_OF_DAY}</li>
* <li>{@link #MILLI_OF_SECOND}</li>
* <li>{@link #MICRO_OF_SECOND}</li>
* <li>{@link #NANO_OF_SECOND}</li>
* <li>{@link #MILLI_OF_DAY}</li>
* <li>{@link #MICRO_OF_DAY}</li>
* <li>{@link #NANO_OF_DAY}</li>
* <li>{@link #PRECISION}</li>
* <li>{@link #DECIMAL_HOUR}</li>
* <li>{@link #DECIMAL_MINUTE}</li>
* <li>{@link #DECIMAL_SECOND}</li>
* </ul>
*
* @author Meno Hochschild
* @doctags.concurrency <immutable>
*/
@CalendarType("iso8601")
public final class PlainTime
extends TimePoint<IsoTimeUnit, PlainTime>
implements WallTime, Temporal<PlainTime> {
//~ Statische Felder/Initialisierungen --------------------------------
/**
 * System property for the representation of the decimal separator.
 */
static final char ISO_DECIMAL_SEPARATOR = (
    Boolean.getBoolean("net.time4j.format.iso.decimal.dot")
    ? '.'
    : ',' // recommendation of the ISO-standard
);

// scaling factors: MRD = nanos per second, MIO = nanos per milli,
// KILO = nanos per micro
private static final int MRD = 1000000000;
private static final int MIO = 1000000;
private static final int KILO = 1000;

// decimal constants used by the BigDecimal element rules
private static final BigDecimal DECIMAL_60 = new BigDecimal(60);
private static final BigDecimal DECIMAL_3600 = new BigDecimal(3600);
private static final BigDecimal DECIMAL_MRD = new BigDecimal(MRD);

private static final BigDecimal DECIMAL_24_0 =
    new BigDecimal("24");
private static final BigDecimal DECIMAL_23_9 =
    new BigDecimal("23.999999999999999");
private static final BigDecimal DECIMAL_59_9 =
    new BigDecimal("59.999999999999999");

// shared cache of the 25 full-hour instances T00:00 ... T24:00,
// filled by the static initializer below
private static final PlainTime[] HOURS = new PlainTime[25];
private static final long serialVersionUID = 2780881537313863339L;

static {
    for (int i = 0; i <= 24; i++) {
        HOURS[i] = new PlainTime(i, 0, 0, 0, false);
    }
}
/** Minimum value (T00:00, start of day). */
static final PlainTime MIN = HOURS[0];
/** Maximum value (T24:00, midnight at end of day). */
static final PlainTime MAX = HOURS[24];
/** Wall time component. */
static final ChronoElement<PlainTime> WALL_TIME = TimeElement.INSTANCE;
/**
* <p>Element with the wall time in the value range
* {@code [T00:00:00,000000000]} until {@code [T24:00:00,000000000]}
* (inclusive in the context of {@code PlainTime} else exclusive). </p>
*
* <p>Example of usage: </p>
*
* <pre>
* PlainTimestamp tsp =
* PlainTimestamp.localFormatter("uuuu-MM-dd", PatternType.CLDR)
* .withDefault(
* PlainTime.COMPONENT,
* PlainTime.midnightAtStartOfDay())
* .parse("2014-08-20");
* System.out.println(tsp); // output: 2014-08-20T00
* </pre>
*
* <p>Note: This element does not define any base unit. </p>
*
* @since 1.2
*/
/*[deutsch]
* <p>Element mit der Uhrzeit im Wertebereich {@code [T00:00:00,000000000]}
* bis {@code [T24:00:00,000000000]} (inklusive im Kontext von
* {@code PlainTime}, sonst exklusive). </p>
*
* <p>Beispiel: </p>
*
* <pre>
* PlainTimestamp tsp =
* PlainTimestamp.localFormatter("uuuu-MM-dd", PatternType.CLDR)
* .withDefault(
* PlainTime.COMPONENT,
* PlainTime.midnightAtStartOfDay())
* .parse("2014-08-20");
* System.out.println(tsp); // output: 2014-08-20T00
* </pre>
*
* <p>Hinweis: Dieses Element definiert keine Basiseinheit. </p>
*
* @since 1.2
*/
// backed by the singleton TimeElement.INSTANCE (same object as WALL_TIME)
public static final WallTimeElement COMPONENT = TimeElement.INSTANCE;
/**
* <p>Element with the half day section relative to noon (ante meridiem
* or post meridiem). </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. In detail
* the mapping from hours to meridiem values looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>AM_PM_OF_DAY</td><td>AM</td><td>AM</td><td>...</td><td>AM</td>
* <td>PM</td><td>PM</td><td>...</td><td>PM</td><td>AM</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*
* <p>Example: </p>
*
* <pre>
* import static net.time4j.PlainTime.AM_PM_OF_DAY;
*
* PlainTime time = PlainTime.of(12, 45, 20);
* System.out.println(time.get(AM_PM_OF_DAY));
* // Output: PM
* </pre>
*
* <p>This element does not define a base unit. </p>
*/
/*[deutsch]
* <p>Element mit dem Tagesabschnitt relativ zur Mittagszeit (Vormittag
* oder Nachmittag). </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Im Detail
* sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>AM_PM_OF_DAY</td><td>AM</td><td>AM</td><td>...</td><td>AM</td>
* <td>PM</td><td>PM</td><td>...</td><td>PM</td><td>AM</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*
* <p>Anwendungsbeispiel: </p>
*
* <pre>
* import static net.time4j.PlainTime.AM_PM_OF_DAY;
*
* PlainTime time = PlainTime.of(12, 45, 20);
* System.out.println(time.get(AM_PM_OF_DAY));
* // Ausgabe: PM
* </pre>
*
* <p>Dieses Element definiert keine Basiseinheit. </p>
*/
// CLDR format symbol "a"; see the hour mapping table in the javadoc above
@FormattableElement(format = "a")
public static final ZonalElement<Meridiem> AM_PM_OF_DAY =
    AmPmElement.AM_PM_OF_DAY;
/**
* <p>Element with the hour of half day in the value range {@code 1-12}
* (dial on an analogue watch). </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>CLOCK_HOUR_OF_AMPM</td><td>12</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>1</td><td>...</td><td>11</td><td>12</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der Halbtagsstunde im Bereich {@code 1-12}
* (Ziffernblattanzeige einer analogen Uhr). </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>CLOCK_HOUR_OF_AMPM</td><td>12</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>1</td><td>...</td><td>11</td><td>12</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
// CLDR format symbol "h", value range 1-12 (see javadoc above); the boolean
// flag presumably selects the half-day variant — cf. CLOCK_HOUR_OF_DAY (true)
@FormattableElement(format = "h")
public static final
    AdjustableElement<Integer, PlainTime> CLOCK_HOUR_OF_AMPM =
    IntegerTimeElement.createClockElement("CLOCK_HOUR_OF_AMPM", false);
/**
* <p>Element with the hour in the value range {@code 1-24} (analogue
* display). </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>CLOCK_HOUR_OF_DAY</td><td>24</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>13</td><td>...</td><td>23</td><td>24</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der Stunde im Bereich {@code 1-24} (analoge Anzeige). </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>CLOCK_HOUR_OF_DAY</td><td>24</td><td>1</td><td>...</td><td>11</td>
* <td>12</td><td>13</td><td>...</td><td>23</td><td>24</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
// CLDR format symbol "k", value range 1-24 (see javadoc above); the boolean
// flag presumably selects the full-day variant — cf. CLOCK_HOUR_OF_AMPM (false)
@FormattableElement(format = "k")
public static final
    AdjustableElement<Integer, PlainTime> CLOCK_HOUR_OF_DAY =
    IntegerTimeElement.createClockElement("CLOCK_HOUR_OF_DAY", true);
/**
* <p>Element with the digital hour of half day in the value range
* {@code 0-11}. </p>
*
* <p>This element handles the value 24:00 in the same way as 00:00, hence
* does not make any difference between start and end of day. This is a
* limitation which preserves the compatibility with CLDR and the class
* {@code java.text.SimpleDateFormat}. In order to support the full
* hour range users can use the element {@link #ISO_HOUR}. In detail
* the mapping to ISO-hours looks like following: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legend</caption>
* <tr>
* <td>DIGITAL_HOUR_OF_AMPM</td><td>0</td><td>1</td><td>...</td><td>11</td>
* <td>0</td><td>1</td><td>...</td><td>11</td><td>0</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
/*[deutsch]
* <p>Element mit der digitalen Halbtagsstunde im Bereich {@code 0-11}. </p>
*
* <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
* also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
* eine Einschränkung, die die Kompatibilität mit CLDR und
* {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
* zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
* werden. Im Detail sieht die Stundenzuordnung so aus: </p>
*
* <div style="margin-top:5px;">
* <table border="1">
* <caption>Legende</caption>
* <tr>
* <td>DIGITAL_HOUR_OF_AMPM</td><td>0</td><td>1</td><td>...</td><td>11</td>
* <td>0</td><td>1</td><td>...</td><td>11</td><td>0</td>
* </tr>
* <tr>
* <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
* <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
* </tr>
* </table>
* </div>
*/
// value range 0-11, CLDR format symbol 'K'
@FormattableElement(format = "K")
public static final
    ProportionalElement<Integer, PlainTime> DIGITAL_HOUR_OF_AMPM =
    IntegerTimeElement.createTimeElement(
        "DIGITAL_HOUR_OF_AMPM",
        IntegerTimeElement.DIGITAL_HOUR_OF_AMPM,
        0,
        11,
        'K');
    /**
     * <p>Element with the digital hour in the value range {@code 0-23}. </p>
     *
     * <p>This element handles the value 24:00 in the same way as 00:00, hence
     * does not make any difference between start and end of day. This is a
     * limitation which preserves the compatibility with CLDR and the class
     * {@code java.text.SimpleDateFormat}. In order to support the full
     * hour range users can use the element {@link #ISO_HOUR}. In detail
     * the mapping to ISO-hours looks like following: </p>
     *
     * <div style="margin-top:5px;">
     * <table border="1">
     * <caption>Legend</caption>
     * <tr>
     *  <td>DIGITAL_HOUR_OF_DAY</td><td>0</td><td>1</td><td>...</td><td>11</td>
     *  <td>12</td><td>13</td><td>...</td><td>23</td><td>0</td>
     * </tr>
     * <tr>
     *  <td>ISO-8601 value</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
     *  <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
     * </tr>
     * </table>
     * </div>
     */
    /*[deutsch]
     * <p>Element mit der digitalen Stunde im Bereich {@code 0-23}. </p>
     *
     * <p>Dieses Element behandelt die Zeit T24:00 genauso wie T00:00, macht
     * also keinen Unterschied zwischen Anfang und Ende eines Tages. Das ist
     * eine Einschränkung, die die Kompatibilität mit CLDR und
     * {@code java.text.SimpleDateFormat} wahrt. Um den vollen Stundenbereich
     * zu unterstützen, sollte möglichst {@link #ISO_HOUR} verwendet
     * werden. Im Detail sieht die Stundenzuordnung so aus: </p>
     *
     * <div style="margin-top:5px;">
     * <table border="1">
     * <caption>Legende</caption>
     * <tr>
     *  <td>DIGITAL_HOUR_OF_DAY</td><td>0</td><td>1</td><td>...</td><td>11</td>
     *  <td>12</td><td>13</td><td>...</td><td>23</td><td>0</td>
     * </tr>
     * <tr>
     *  <td>ISO-8601-Wert</td><td>T0</td><td>T1</td><td>...</td><td>T11</td>
     *  <td>T12</td><td>T13</td><td>...</td><td>T23</td><td>T24</td>
     * </tr>
     * </table>
     * </div>
     */
    @FormattableElement(format = "H")
    public static final
    ProportionalElement<Integer, PlainTime> DIGITAL_HOUR_OF_DAY =
        IntegerTimeElement.createTimeElement(
            "DIGITAL_HOUR_OF_DAY",
            IntegerTimeElement.DIGITAL_HOUR_OF_DAY,
            0,
            23,
            'H');
    /**
     * <p>Element with the ISO-8601-hour of day in the value range
     * {@code 0-24}. </p>
     *
     * <p>Given a context of {@code PlainTime} with full hours, the maximum
     * is {@code 24} and stands for the time 24:00 (midnight at end of day),
     * else the maximum is {@code 23} in every different context. </p>
     *
     * @see     #getHour()
     */
    /*[deutsch]
     * <p>Element mit der ISO-8601-Stunde im Bereich {@code 0-24}. </p>
     *
     * <p>Im Kontext von {@code PlainTime} mit vollen Stunden ist das Maximum
     * {@code 24} und steht für die Uhrzeit T24:00, ansonsten ist das
     * Maximum in jedem anderen Kontext {@code 23}. </p>
     *
     * @see     #getHour()
     */
    public static final ProportionalElement<Integer, PlainTime> ISO_HOUR =
        IntegerTimeElement.createTimeElement(
            "ISO_HOUR",
            IntegerTimeElement.ISO_HOUR,
            0,
            23, // reduced default max; the time axis registers this element with max 24 (T24:00)
            '\u0000'); // no format pattern symbol assigned
    /**
     * <p>Element with the minute of hour in the value range {@code 0-59}. </p>
     *
     * @see     #getMinute()
     */
    /*[deutsch]
     * <p>Element mit der Minute im Bereich {@code 0-59}. </p>
     *
     * @see     #getMinute()
     */
    @FormattableElement(format = "m")
    public static final ProportionalElement<Integer, PlainTime> MINUTE_OF_HOUR =
        IntegerTimeElement.createTimeElement(
            "MINUTE_OF_HOUR",
            IntegerTimeElement.MINUTE_OF_HOUR,
            0,
            59,
            'm');

    /**
     * <p>Element with the minute of day in the value range {@code 0-1440}. </p>
     *
     * <p>Given a context of {@code PlainTime} with full minutes, the maximum
     * is {@code 1440} and stands for the time 24:00 (midnight at end of day),
     * else the maximum is {@code 1439} in every different context. </p>
     */
    /*[deutsch]
     * <p>Element mit der Minute des Tages im Bereich {@code 0-1440}. </p>
     *
     * <p>Im Kontext von {@code PlainTime} mit vollen Minuten ist das Maximum
     * {@code 1440} und steht für die Uhrzeit T24:00, ansonsten ist das
     * Maximum in jedem anderen Kontext {@code 1439}. </p>
     */
    public static final ProportionalElement<Integer, PlainTime> MINUTE_OF_DAY =
        IntegerTimeElement.createTimeElement(
            "MINUTE_OF_DAY",
            IntegerTimeElement.MINUTE_OF_DAY,
            0,
            1439, // reduced default max; the time axis registers this element with max 1440 (T24:00)
            '\u0000');
    /**
     * <p>Element with the second of minute in the value range
     * {@code 0-59}. </p>
     *
     * <p>This element does not know any leapseconds in a local context and
     * refers to a normal analogue clock. If this element is used in
     * UTC-context ({@link Moment}) however then the value range is
     * {@code 0-58/59/60} instead. </p>
     *
     * @see     #getSecond()
     */
    /*[deutsch]
     * <p>Element mit der Sekunde im Bereich {@code 0-59}. </p>
     *
     * <p>Dieses Element kennt im lokalen Kontext keine UTC-Schaltsekunden und
     * bezieht sich auf eine normale analoge Uhr. Wenn dieses Element im
     * UTC-Kontext ({@link Moment}) verwendet wird, dann ist der Wertebereich
     * stattdessen {@code 0-58/59/60}. </p>
     *
     * @see     #getSecond()
     */
    @FormattableElement(format = "s")
    public static final
    ProportionalElement<Integer, PlainTime> SECOND_OF_MINUTE =
        IntegerTimeElement.createTimeElement(
            "SECOND_OF_MINUTE",
            IntegerTimeElement.SECOND_OF_MINUTE,
            0,
            59,
            's');

    /**
     * <p>Element with the second of day in the value range
     * {@code 0-86400}. </p>
     *
     * <p>Given a context of {@code PlainTime} with full seconds, the maximum
     * is {@code 86400} and stands for the time 24:00 (midnight at end of day),
     * else the maximum is {@code 86399} in every different context. Leapseconds
     * are never counted. </p>
     */
    /*[deutsch]
     * <p>Element mit der Sekunde des Tages im Bereich
     * {@code 0-86400}. </p>
     *
     * <p>Im Kontext von {@code PlainTime} mit vollen Sekunden entspricht das
     * Maximum {@code 86400} der Uhrzeit T24:00, in jedem anderen Kontext gilt
     * {@code 86399}. UTC-Schaltsekunden werden nicht mitgezählt. </p>
     */
    public static final
    ProportionalElement<Integer, PlainTime> SECOND_OF_DAY =
        IntegerTimeElement.createTimeElement(
            "SECOND_OF_DAY",
            IntegerTimeElement.SECOND_OF_DAY,
            0,
            86399, // reduced default max; the time axis registers this element with max 86400 (T24:00)
            '\u0000');
    /**
     * <p>Element with the millisecond in the value range {@code 0-999}. </p>
     */
    /*[deutsch]
     * <p>Element mit der Millisekunde im Bereich {@code 0-999}. </p>
     */
    public static final
    ProportionalElement<Integer, PlainTime> MILLI_OF_SECOND =
        IntegerTimeElement.createTimeElement(
            "MILLI_OF_SECOND",
            IntegerTimeElement.MILLI_OF_SECOND,
            0,
            999,
            '\u0000'); // no format pattern symbol assigned

    /**
     * <p>Element with the microsecond in the value range {@code 0-999999}. </p>
     */
    /*[deutsch]
     * <p>Element mit der Mikrosekunde im Bereich {@code 0-999999}. </p>
     */
    public static final
    ProportionalElement<Integer, PlainTime> MICRO_OF_SECOND =
        IntegerTimeElement.createTimeElement(
            "MICRO_OF_SECOND",
            IntegerTimeElement.MICRO_OF_SECOND,
            0,
            999999,
            '\u0000'); // no format pattern symbol assigned

    /**
     * <p>Element with the nanosecond in the value range
     * {@code 0-999999999}. </p>
     */
    /*[deutsch]
     * <p>Element mit der Nanosekunde im Bereich {@code 0-999999999}. </p>
     */
    @FormattableElement(format = "S")
    public static final
    ProportionalElement<Integer, PlainTime> NANO_OF_SECOND =
        IntegerTimeElement.createTimeElement(
            "NANO_OF_SECOND",
            IntegerTimeElement.NANO_OF_SECOND,
            0,
            999999999,
            'S');
    /**
     * <p>Element with the day time in milliseconds in the value range
     * {@code 0-86400000}. </p>
     *
     * <p>Given a context of {@code PlainTime} with full milliseconds, the
     * maximum is {@code 86400000} and stands for the time 24:00 (midnight at
     * end of day), else the maximum is {@code 86399999} in every different
     * context. Leapseconds are never counted. </p>
     */
    /*[deutsch]
     * <p>Element mit der Tageszeit in Millisekunden im
     * Bereich {@code 0-86400000}. </p>
     *
     * <p>Im Kontext von {@code PlainTime} mit vollen Millisekunden ist das
     * Maximum {@code 86400000} (entsprechend der Uhrzeit T24:00), in jedem
     * anderen Kontext ist das Maximum der Wert {@code 86399999}.
     * UTC-Schaltsekunden werden nicht mitgezählt. </p>
     */
    @FormattableElement(format = "A")
    public static final
    ProportionalElement<Integer, PlainTime> MILLI_OF_DAY =
        IntegerTimeElement.createTimeElement(
            "MILLI_OF_DAY",
            IntegerTimeElement.MILLI_OF_DAY,
            0,
            86399999, // reduced default max; the time axis registers this element with max 86400000 (T24:00)
            'A');

    /**
     * <p>Element with the day time in microseconds in the value range
     * {@code 0-86400000000}. </p>
     *
     * <p>Given a context of {@code PlainTime} with full microseconds, the
     * maximum is {@code 86400000000} and stands for the time 24:00 (midnight
     * at end of day), else the maximum is {@code 86399999999} in every
     * different context. Leapseconds are never counted. </p>
     */
    /*[deutsch]
     * <p>Element mit der Tageszeit in Mikrosekunden im
     * Bereich {@code 0-86400000000}. </p>
     *
     * <p>Im Kontext von {@code PlainTime} mit vollen Mikrosekunden ist das
     * Maximum {@code 86400000000} (entsprechend der Uhrzeit T24:00), in jedem
     * anderen Kontext ist das Maximum der Wert {@code 86399999999}.
     * UTC-Schaltsekunden werden nicht mitgezählt. </p>
     */
    public static final
    ProportionalElement<Long, PlainTime> MICRO_OF_DAY =
        LongElement.create("MICRO_OF_DAY", 0L, 86399999999L); // axis registers max 86400000000 (T24:00)
    /**
     * <p>Element with the day time in nanoseconds in the value range
     * {@code 0-86400000000000}. </p>
     *
     * <p>Given a context of {@code PlainTime}, the maximum is
     * {@code 86400000000000} and stands for the time 24:00 (midnight
     * at end of day), else the maximum is {@code 86399999999999} in every
     * different context. Leapseconds are never counted. </p>
     *
     * <p>Example: </p>
     *
     * <pre>
     *  import static net.time4j.ClockUnit.HOURS;
     *  import static net.time4j.PlainTime.NANO_OF_DAY;
     *
     *  PlainTime time =
     *      PlainTime.midnightAtStartOfDay().plus(6, HOURS); // T06:00
     *  System.out.println(
     *      time.get(NANO_OF_DAY.ratio())
     *          .multiply(BigDecimal.valueOf(100)).stripTrailingZeros()
     *      + &quot;% of day are over.&quot;);
     *  // Output: 25% of day are over.
     * </pre>
     */
    /*[deutsch]
     * <p>Element mit der Tageszeit in Nanosekunden im
     * Bereich {@code 0-86400000000000}. </p>
     *
     * <p>Im Kontext von {@code PlainTime} ist das Maximum stets
     * {@code 86400000000000} (entsprechend der Uhrzeit T24:00), in jedem
     * anderen Kontext ist das Maximum der Wert {@code 86399999999999}.
     * UTC-Schaltsekunden werden nicht mitgezählt. </p>
     *
     * <p>Beispiel: </p>
     *
     * <pre>
     *  import static net.time4j.ClockUnit.HOURS;
     *  import static net.time4j.PlainTime.NANO_OF_DAY;
     *
     *  PlainTime time =
     *      PlainTime.midnightAtStartOfDay().plus(6, HOURS); // T06:00
     *  System.out.println(
     *      time.get(NANO_OF_DAY.ratio())
     *          .multiply(BigDecimal.valueOf(100)).stripTrailingZeros()
     *      + &quot;% of day are over.&quot;);
     *  // Ausgabe: 25% of day are over.
     * </pre>
     */
    public static final
    ProportionalElement<Long, PlainTime> NANO_OF_DAY =
        LongElement.create("NANO_OF_DAY", 0L, 86399999999999L); // axis registers max 86400000000000 (T24:00)
    /**
     * <p>Decimal hour in the value range {@code 0.0} inclusive until
     * {@code 24.0} exclusive (inclusive in {@code PlainTime}). </p>
     *
     * <p>This element does not define any base unit. </p>
     */
    /*[deutsch]
     * <p>Dezimal-Stunde im Wertebereich {@code 0.0} inklusive bis
     * {@code 24.0} exklusive (inklusive in {@code PlainTime}). </p>
     *
     * <p>Dieses Element definiert keine Basiseinheit. </p>
     */
    public static final ZonalElement<BigDecimal> DECIMAL_HOUR =
        new DecimalTimeElement("DECIMAL_HOUR", DECIMAL_23_9); // default max just below 24

    /**
     * <p>Decimal minute in the value range {@code 0.0} inclusive until
     * {@code 60.0} exclusive. </p>
     *
     * <p>This element does not define any base unit. </p>
     */
    /*[deutsch]
     * <p>Dezimal-Minute im Wertebereich {@code 0.0} inklusive bis
     * {@code 60.0} exklusive. </p>
     *
     * <p>Dieses Element definiert keine Basiseinheit. </p>
     */
    public static final ZonalElement<BigDecimal> DECIMAL_MINUTE =
        new DecimalTimeElement("DECIMAL_MINUTE", DECIMAL_59_9);

    /**
     * <p>Decimal second in the value range {@code 0.0} inclusive until
     * {@code 60.0} exclusive. </p>
     *
     * <p>This element does not define any base unit. </p>
     */
    /*[deutsch]
     * <p>Dezimal-Sekunde im Wertebereich {@code 0.0} inklusive bis
     * {@code 60.0} exklusive. </p>
     *
     * <p>Dieses Element definiert keine Basiseinheit. </p>
     */
    public static final ZonalElement<BigDecimal> DECIMAL_SECOND =
        new DecimalTimeElement("DECIMAL_SECOND", DECIMAL_59_9);
    /**
     * <p>Defines the precision as the smallest non-zero time element and
     * truncates time parts of higher precision if necessary. </p>
     *
     * <p>Setting higher precisions than available is without any effect.
     * But setting lower precisions can truncate data however. Examples: </p>
     *
     * <pre>
     *  // reading of precision -------------------------------------
     *  PlainTime time = PlainTime.of(12, 26, 52, 987654000);
     *  System.out.println(time.get(PRECISION)); // Output: MICROS
     *
     *  // setting of precision -------------------------------------
     *  PlainTime time = PlainTime.of(12, 26, 52, 987654000);
     *  System.out.println(time.with(PRECISION, ClockUnit.MILLIS));
     *  // Output: T12:26:52,987
     * </pre>
     *
     * <p>This element does not define any base unit. </p>
     */
    /*[deutsch]
     * <p>Definiert die Genauigkeit als das kleinste von {@code 0} verschiedene
     * Uhrzeitelement und schneidet bei Bedarf zu genaue Zeitanteile ab. </p>
     *
     * <p>Beim Setzen der Genauigkeit ist zu beachten, daß eine
     * höhere Genauigkeit wirkungslos ist. Das Setzen einer kleineren
     * Genauigkeit hingegen schneidet Daten ab. Beispiele: </p>
     *
     * <pre>
     *  // Lesen der Genauigkeit ------------------------------------
     *  PlainTime time = PlainTime.of(12, 26, 52, 987654000);
     *  System.out.println(time.get(PRECISION)); // Ausgabe: MICROS
     *
     *  // Setzen der Genauigkeit -----------------------------------
     *  PlainTime time = PlainTime.of(12, 26, 52, 987654000);
     *  System.out.println(time.with(PRECISION, ClockUnit.MILLIS));
     *  // Ausgabe: T12:26:52,987
     * </pre>
     *
     * <p>Dieses Element definiert keine Basiseinheit. </p>
     */
    public static final ChronoElement<ClockUnit> PRECISION =
        PrecisionElement.PRECISION; // shared singleton element
    // Supports serialization: lookup table of all registered elements by name.
    private static final Map<String, Object> ELEMENTS;

    static {
        Map<String, Object> constants = new HashMap<String, Object>();
        fill(constants, WALL_TIME);
        fill(constants, AM_PM_OF_DAY);
        fill(constants, CLOCK_HOUR_OF_AMPM);
        fill(constants, CLOCK_HOUR_OF_DAY);
        fill(constants, DIGITAL_HOUR_OF_AMPM);
        fill(constants, DIGITAL_HOUR_OF_DAY);
        fill(constants, ISO_HOUR);
        fill(constants, MINUTE_OF_HOUR);
        fill(constants, MINUTE_OF_DAY);
        fill(constants, SECOND_OF_MINUTE);
        fill(constants, SECOND_OF_DAY);
        fill(constants, MILLI_OF_SECOND);
        fill(constants, MICRO_OF_SECOND);
        fill(constants, NANO_OF_SECOND);
        fill(constants, MILLI_OF_DAY);
        fill(constants, MICRO_OF_DAY);
        fill(constants, NANO_OF_DAY);
        fill(constants, DECIMAL_HOUR);
        fill(constants, DECIMAL_MINUTE);
        fill(constants, DECIMAL_SECOND);
        fill(constants, PRECISION);
        ELEMENTS = Collections.unmodifiableMap(constants); // frozen after class init
    }
    // Element rules for the decimal elements; the hour rule uses the extended
    // maximum 24.0 (T24:00) while minute and second stay below 60.
    private static final ElementRule<PlainTime, BigDecimal> H_DECIMAL_RULE =
        new BigDecimalElementRule(DECIMAL_HOUR, DECIMAL_24_0);
    private static final ElementRule<PlainTime, BigDecimal> M_DECIMAL_RULE =
        new BigDecimalElementRule(DECIMAL_MINUTE, DECIMAL_59_9);
    private static final ElementRule<PlainTime, BigDecimal> S_DECIMAL_RULE =
        new BigDecimalElementRule(DECIMAL_SECOND, DECIMAL_59_9);
    // The single time axis (chronology) of this class.
    private static final TimeAxis<IsoTimeUnit, PlainTime> ENGINE;

    static {
        // Registers every element together with its rule and, where defined,
        // its base unit. Note: the maxima registered here include the values
        // for T24:00 (e.g. ISO_HOUR up to 24), in contrast to the reduced
        // default maxima used when creating the element constants above.
        TimeAxis.Builder<IsoTimeUnit, PlainTime> builder =
            TimeAxis.Builder.setUp(
                IsoTimeUnit.class,
                PlainTime.class,
                new Merger(),
                PlainTime.MIN,
                PlainTime.MAX)
            .appendElement(
                WALL_TIME,
                new TimeRule())
            .appendElement(
                AM_PM_OF_DAY,
                new MeridiemRule())
            .appendElement(
                CLOCK_HOUR_OF_AMPM,
                new IntegerElementRule(CLOCK_HOUR_OF_AMPM, 1, 12),
                ClockUnit.HOURS)
            .appendElement(
                CLOCK_HOUR_OF_DAY,
                new IntegerElementRule(CLOCK_HOUR_OF_DAY, 1, 24),
                ClockUnit.HOURS)
            .appendElement(
                DIGITAL_HOUR_OF_AMPM,
                new IntegerElementRule(DIGITAL_HOUR_OF_AMPM, 0, 11),
                ClockUnit.HOURS)
            .appendElement(
                DIGITAL_HOUR_OF_DAY,
                new IntegerElementRule(DIGITAL_HOUR_OF_DAY, 0, 23),
                ClockUnit.HOURS)
            .appendElement(
                ISO_HOUR,
                new IntegerElementRule(ISO_HOUR, 0, 24),
                ClockUnit.HOURS)
            .appendElement(
                MINUTE_OF_HOUR,
                new IntegerElementRule(MINUTE_OF_HOUR, 0, 59),
                ClockUnit.MINUTES)
            .appendElement(
                MINUTE_OF_DAY,
                new IntegerElementRule(MINUTE_OF_DAY, 0, 1440),
                ClockUnit.MINUTES)
            .appendElement(
                SECOND_OF_MINUTE,
                new IntegerElementRule(SECOND_OF_MINUTE, 0, 59),
                ClockUnit.SECONDS)
            .appendElement(
                SECOND_OF_DAY,
                new IntegerElementRule(SECOND_OF_DAY, 0, 86400),
                ClockUnit.SECONDS)
            .appendElement(
                MILLI_OF_SECOND,
                new IntegerElementRule(MILLI_OF_SECOND, 0, 999),
                ClockUnit.MILLIS)
            .appendElement(
                MICRO_OF_SECOND,
                new IntegerElementRule(MICRO_OF_SECOND, 0, 999999),
                ClockUnit.MICROS)
            .appendElement(
                NANO_OF_SECOND,
                new IntegerElementRule(NANO_OF_SECOND, 0, 999999999),
                ClockUnit.NANOS)
            .appendElement(
                MILLI_OF_DAY,
                new IntegerElementRule(MILLI_OF_DAY, 0, 86400000),
                ClockUnit.MILLIS)
            .appendElement(
                MICRO_OF_DAY,
                new LongElementRule(MICRO_OF_DAY, 0, 86400000000L),
                ClockUnit.MICROS)
            .appendElement(
                NANO_OF_DAY,
                new LongElementRule(NANO_OF_DAY, 0, 86400000000000L),
                ClockUnit.NANOS)
            .appendElement(
                DECIMAL_HOUR,
                H_DECIMAL_RULE)
            .appendElement(
                DECIMAL_MINUTE,
                M_DECIMAL_RULE)
            .appendElement(
                DECIMAL_SECOND,
                S_DECIMAL_RULE)
            .appendElement(
                PRECISION,
                new PrecisionRule());
        registerUnits(builder); // registers the clock units (defined elsewhere in this file)
        ENGINE = builder.build();
    }
    //~ Instance variables ------------------------------------------------

    private transient final byte hour;   // 0-24 (24 only for T24:00)
    private transient final byte minute; // 0-59 (0 if hour == 24)
    private transient final byte second; // 0-59 (0 if hour == 24)
    private transient final int nano;    // 0-999999999 (0 if hour == 24)

    //~ Constructors ------------------------------------------------------
    /**
     * <p>Private constructor - use the static factory methods instead. </p>
     *
     * @param   hour        hour in the range {@code 0-24}
     * @param   minute      minute in the range {@code 0-59}
     * @param   second      second in the range {@code 0-59}
     * @param   nanosecond  nanosecond in the range {@code 0-999999999}
     * @param   validating  if {@code true} then the arguments are range-checked
     * @throws  IllegalArgumentException if validating and any argument is out
     *          of range or if T24:00 is combined with a non-zero sub-hour part
     */
    private PlainTime(
        int hour,
        int minute,
        int second,
        int nanosecond,
        boolean validating
    ) {
        super();

        if (validating) {
            checkHour(hour);
            checkMinute(minute);
            checkSecond(second);
            checkNano(nanosecond);

            // T24:00 is only valid as exact full hour
            if (
                (hour == 24)
                && ((minute | second | nanosecond) != 0)
            ) {
                throw new IllegalArgumentException("T24:00:00 exceeded.");
            }
        }

        // narrowing casts are safe after validation (values fit in a byte)
        this.hour = (byte) hour;
        this.minute = (byte) minute;
        this.second = (byte) second;
        this.nano = nanosecond;
    }
    //~ Methods -----------------------------------------------------------

    @Override
    public int getHour() {
        return this.hour; // byte widens to int
    }

    @Override
    public int getMinute() {
        return this.minute;
    }

    @Override
    public int getSecond() {
        return this.second;
    }

    @Override
    public int getNanosecond() {
        return this.nano;
    }
    /**
     * <p>Yields midnight at the start of the day. </p>
     *
     * @return  midnight at the start of day T00:00
     * @see     #midnightAtEndOfDay()
     */
    /*[deutsch]
     * <p>Liefert Mitternacht zu Beginn des Tages. </p>
     *
     * @return  midnight at the start of day T00:00
     * @see     #midnightAtEndOfDay()
     */
    public static PlainTime midnightAtStartOfDay() {
        return PlainTime.MIN; // shared immutable constant T00:00
    }

    /**
     * <p>Yields midnight at the end of the day, that is midnight at
     * the start of the following day. </p>
     *
     * @return  midnight at the end of day T24:00
     * @see     #midnightAtStartOfDay()
     */
    /*[deutsch]
     * <p>Liefert Mitternacht zum Ende des Tages, das ist Mitternacht zum
     * Start des Folgetags. </p>
     *
     * @return  midnight at the end of day T24:00
     * @see     #midnightAtStartOfDay()
     */
    public static PlainTime midnightAtEndOfDay() {
        return PlainTime.MAX; // shared immutable constant T24:00
    }
    /**
     * <p>Creates a wall time as full hour. </p>
     *
     * @param   hour    iso-hour of day in the range {@code 0-24}
     * @return  cached full hour
     * @throws  IllegalArgumentException if given hour is out of range
     */
    /*[deutsch]
     * <p>Erzeugt eine neue Uhrzeit als volle Stunde. </p>
     *
     * @param   hour    iso-hour of day in the range {@code 0-24}
     * @return  cached full hour
     * @throws  IllegalArgumentException if given hour is out of range
     */
    public static PlainTime of(int hour) {
        checkHour(hour);
        return HOURS[hour]; // full hours are precomputed and cached
    }
/**
* <p>Creates a wall time with hour and minute. </p>
*
* @param hour hour of day in the range {@code 0-23} or
* {@code 24} if the given minute equals to {@code 0}
* @param minute minute in the range {@code 0-59}
* @return new or cached wall time
* @throws IllegalArgumentException if any argument is out of range
*/
/*[deutsch]
* <p>Erzeugt eine neue Uhrzeit mit Stunde und Minute. </p>
*
* @param hour hour of day in the range {@code 0-23} or
* {@code 24} if the given minute equals to {@code 0}
* @param minute minute in the range {@code 0-59}
* @return new or cached wall time
* @throws IllegalArgumentException if any argument is out of range
*/
public static PlainTime of(
int hour,
int minute
) {
if (minute == 0) {
return PlainTime.of(hour);
}
return new PlainTime(hour, minute, 0, 0, true);
}
/**
* <p>Creates a wall time with hour, minute and second. </p>
*
* @param hour hour in the range {@code 0-23} or {@code 24}
* if the other arguments are equal to {@code 0}
* @param minute minute in the range {@code 0-59}
* @param second second in the range {@code 0-59}
* @return new or cached wall time
* @throws IllegalArgumentException if any argument is out of range
*/
/*[deutsch]
* <p>Erzeugt eine neue Uhrzeit mit Stunde, Minute und Sekunde. </p>
*
* @param hour hour in the range {@code 0-23} or {@code 24}
* if the other arguments are equal to {@code 0}
* @param minute minute in the range {@code 0-59}
* @param second second in the range {@code 0-59}
* @return new or cached wall time
* @throws IllegalArgumentException if any argument is out of range
*/
public static PlainTime of(
int hour,
int minute,
int second
) {
if ((minute | second) == 0) {
return PlainTime.of(hour);
}
return new PlainTime(hour, minute, second, 0, true);
}
    /**
     * <p>Creates a wall time with hour, minute, second and nanosecond. </p>
     *
     * @param   hour        hour in the range {@code 0-23} or {@code 24}
     *                      if the other arguments are equal to {@code 0}
     * @param   minute      minute in the range {@code 0-59}
     * @param   second      second in the range {@code 0-59}
     * @param   nanosecond  nanosecond in the range {@code 0-999,999,999}
     * @return  new or cached wall time
     * @throws  IllegalArgumentException if any argument is out of range
     * @see     #of(int)
     * @see     #of(int, int)
     * @see     #of(int, int, int)
     * @see     #NANO_OF_SECOND
     */
    /*[deutsch]
     * <p>Erzeugt eine neue Uhrzeit mit Stunde, Minute, Sekunde und
     * Nanosekunde. </p>
     *
     * @param   hour        hour in the range {@code 0-23} or {@code 24}
     *                      if the other arguments are equal to {@code 0}
     * @param   minute      minute in the range {@code 0-59}
     * @param   second      second in the range {@code 0-59}
     * @param   nanosecond  nanosecond in the range {@code 0-999,999,999}
     * @return  new or cached wall time
     * @throws  IllegalArgumentException if any argument is out of range
     * @see     #of(int)
     * @see     #of(int, int)
     * @see     #of(int, int, int)
     * @see     #NANO_OF_SECOND
     */
    public static PlainTime of(
        int hour,
        int minute,
        int second,
        int nanosecond
    ) {
        return PlainTime.of(hour, minute, second, nanosecond, true);
    }
    /**
     * <p>Creates a wall time by given decimal hour. </p>
     *
     * @param   decimal    decimal hour of day in the range {@code [0.0-24.0]}
     * @return  new or cached wall time
     * @throws  IllegalArgumentException if the argument is out of range
     * @see     #DECIMAL_HOUR
     */
    /*[deutsch]
     * <p>Erzeugt eine neue Uhrzeit auf Basis der angegebenen
     * Dezimalstunde. </p>
     *
     * @param   decimal    decimal hour of day in the range {@code [0.0-24.0]}
     * @return  new or cached wall time
     * @throws  IllegalArgumentException if the argument is out of range
     * @see     #DECIMAL_HOUR
     */
    public static PlainTime of(BigDecimal decimal) {
        // NOTE(review): the rule is called with a null context here - it
        // apparently tolerates that and creates a fresh instance; confirm
        // against BigDecimalElementRule.withValue()
        return H_DECIMAL_RULE.withValue(null, decimal, false);
    }
/**
* <p>Common conversion method. </p>
*
* @param time ISO-time
* @return PlainTime
*/
/*[deutsch]
* <p>Allgemeine Konversionsmethode. </p>
*
* @param time ISO-time
* @return PlainTime
*/
public static PlainTime from(WallTime time) {
if (time instanceof PlainTime) {
return (PlainTime) time;
} else if (time instanceof PlainTimestamp) {
return ((PlainTimestamp) time).getWallTime();
} else {
return PlainTime.of(
time.getHour(),
time.getMinute(),
time.getSecond(),
time.getNanosecond());
}
}
    /**
     * <p>Rolls this time by the given duration (as amount and unit) and
     * also counts possible day overflow. </p>
     *
     * @param   amount      amount to be added (maybe negative)
     * @param   unit        time unit
     * @return  result of rolling including possible day overflow
     * @see     #plus(long, Object) plus(long, IsoTimeUnit)
     */
    /*[deutsch]
     * <p>Rollt die angegebene Dauer mit Betrag und Einheit zu dieser Uhrzeit
     * auf und zählt dabei auch tageweise Überläufe. </p>
     *
     * @param   amount      amount to be added (maybe negative)
     * @param   unit        time unit
     * @return  result of rolling including possible day overflow
     * @see     #plus(long, Object) plus(long, IsoTimeUnit)
     */
    public DayCycles roll(
        long amount,
        ClockUnit unit
    ) {
        // delegation to the unit rule which tracks the day overflow
        return ClockUnitRule.addToWithOverflow(this, amount, unit);
    }
    /**
     * <p>Creates a new formatter which uses the given pattern in the
     * default locale for formatting and parsing plain times. </p>
     *
     * <p>Note: The formatter can be adjusted to other locales however. </p>
     *
     * @param   formatPattern   format definition as pattern
     * @param   patternType     pattern dialect
     * @return  format object for formatting {@code PlainTime}-objects
     *          using system locale
     * @throws  IllegalArgumentException if resolving of pattern fails
     * @see     PatternType
     * @see     ChronoFormatter#with(Locale)
     */
    /*[deutsch]
     * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Musters
     * in der Standard-Sprach- und Ländereinstellung. </p>
     *
     * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
     *
     * @param   formatPattern   format definition as pattern
     * @param   patternType     pattern dialect
     * @return  format object for formatting {@code PlainTime}-objects
     *          using system locale
     * @throws  IllegalArgumentException if resolving of pattern fails
     * @see     PatternType
     * @see     ChronoFormatter#with(Locale)
     */
    public static ChronoFormatter<PlainTime> localFormatter(
        String formatPattern,
        ChronoPattern patternType
    ) {
        // note: Locale.getDefault() is evaluated once at creation time
        return ChronoFormatter
            .setUp(PlainTime.class, Locale.getDefault())
            .addPattern(formatPattern, patternType)
            .build();
    }
    /**
     * <p>Creates a new formatter which uses the given display mode in the
     * default locale for formatting and parsing plain times. </p>
     *
     * <p>Note: The formatter can be adjusted to other locales however. </p>
     *
     * @param   mode        formatting style
     * @return  format object for formatting {@code PlainTime}-objects
     *          using system locale
     * @throws  IllegalStateException if format pattern cannot be retrieved
     * @see     ChronoFormatter#with(Locale)
     */
    /*[deutsch]
     * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Stils
     * in der Standard-Sprach- und Ländereinstellung. </p>
     *
     * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
     *
     * @param   mode        formatting style
     * @return  format object for formatting {@code PlainTime}-objects
     *          using system locale
     * @throws  IllegalStateException if format pattern cannot be retrieved
     * @see     ChronoFormatter#with(Locale)
     */
    public static ChronoFormatter<PlainTime> localFormatter(DisplayMode mode) {
        int style = PatternType.getFormatStyle(mode);
        DateFormat df = DateFormat.getTimeInstance(style);
        // strips zone/offset symbols since a PlainTime carries no zone
        // (see removeZones, defined elsewhere in this file)
        String pattern = removeZones(PatternType.getFormatPattern(df));

        return ChronoFormatter
            .setUp(PlainTime.class, Locale.getDefault())
            .addPattern(pattern, PatternType.SIMPLE_DATE_FORMAT)
            .build();
    }
    /**
     * <p>Creates a new formatter which uses the given pattern and locale
     * for formatting and parsing plain times. </p>
     *
     * <p>Note: The formatter can be adjusted to other locales however. </p>
     *
     * @param   formatPattern   format definition as pattern
     * @param   patternType     pattern dialect
     * @param   locale          locale setting
     * @return  format object for formatting {@code PlainTime}-objects
     *          using given locale
     * @throws  IllegalArgumentException if resolving of pattern fails
     * @see     PatternType
     * @see     #localFormatter(String,ChronoPattern)
     */
    /*[deutsch]
     * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Musters
     * in der angegebenen Sprach- und Ländereinstellung. </p>
     *
     * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
     *
     * @param   formatPattern   format definition as pattern
     * @param   patternType     pattern dialect
     * @param   locale          locale setting
     * @return  format object for formatting {@code PlainTime}-objects
     *          using given locale
     * @throws  IllegalArgumentException if resolving of pattern fails
     * @see     PatternType
     * @see     #localFormatter(String,ChronoPattern)
     */
    public static ChronoFormatter<PlainTime> formatter(
        String formatPattern,
        ChronoPattern patternType,
        Locale locale
    ) {
        return ChronoFormatter
            .setUp(PlainTime.class, locale)
            .addPattern(formatPattern, patternType)
            .build();
    }
    /**
     * <p>Creates a new formatter which uses the given display mode and locale
     * for formatting and parsing plain times. </p>
     *
     * <p>Note: The formatter can be adjusted to other locales however. </p>
     *
     * @param   mode        formatting style
     * @param   locale      locale setting
     * @return  format object for formatting {@code PlainTime}-objects
     *          using given locale
     * @throws  IllegalStateException if format pattern cannot be retrieved
     * @see     #localFormatter(DisplayMode)
     */
    /*[deutsch]
     * <p>Erzeugt ein neues Format-Objekt mit Hilfe des angegebenen Stils
     * und in der angegebenen Sprach- und Ländereinstellung. </p>
     *
     * <p>Das Format-Objekt kann an andere Sprachen angepasst werden. </p>
     *
     * @param   mode        formatting style
     * @param   locale      locale setting
     * @return  format object for formatting {@code PlainTime}-objects
     *          using given locale
     * @throws  IllegalStateException if format pattern cannot be retrieved
     * @see     #localFormatter(DisplayMode)
     */
    public static ChronoFormatter<PlainTime> formatter(
        DisplayMode mode,
        Locale locale
    ) {
        int style = PatternType.getFormatStyle(mode);
        DateFormat df = DateFormat.getTimeInstance(style, locale);
        // strips zone/offset symbols since a PlainTime carries no zone
        // (see removeZones, defined elsewhere in this file)
        String pattern = removeZones(PatternType.getFormatPattern(df));

        return ChronoFormatter
            .setUp(PlainTime.class, locale)
            .addPattern(pattern, PatternType.SIMPLE_DATE_FORMAT)
            .build();
    }
/**
* <p>Compares the full state, that is hour, minute, second and nanosecond
* of this instance and given argument. </p>
*/
/*[deutsch]
* <p>Vergleicht alle Zeitzustandsattribute, nämlich Stunde, Minute,
* Sekunde und Nanosekunde. </p>
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
} else if (obj instanceof PlainTime) {
PlainTime that = (PlainTime) obj;
return (
(this.hour == that.hour)
&& (this.minute == that.minute)
&& (this.second == that.second)
&& (this.nano == that.nano)
);
} else {
return false;
}
}
    /**
     * <p>Based on all time state attributes. </p>
     */
    /*[deutsch]
     * <p>Basiert auf allen Zeitzustandsattributen. </p>
     */
    @Override
    public int hashCode() {
        return (
            this.hour
            + 60 * this.minute
            + 3600 * this.second
            + 37 * this.nano);
    }
    // strictly earlier on the timeline
    @Override
    public boolean isBefore(PlainTime time) {
        return (this.compareTo(time) < 0);
    }

    // strictly later on the timeline
    @Override
    public boolean isAfter(PlainTime time) {
        return (this.compareTo(time) > 0);
    }

    // same position on the timeline (equivalent to equals() here)
    @Override
    public boolean isSimultaneous(PlainTime time) {
        return (this.compareTo(time) == 0);
    }
    /**
     * <p>Is this instance at midnight, either at start or at end of day? </p>
     *
     * @return  boolean
     */
    /*[deutsch]
     * <p>Liegt Mitternacht vor (am Anfang oder am Ende eines Tages)? </p>
     *
     * @return  boolean
     */
    public boolean isMidnight() {
        // full hour with hour == 0 (T00:00) or hour == 24 (T24:00)
        return (this.isFullHour() && ((this.hour % 24) == 0));
    }
/**
* <p>Defines a natural order which is solely based on the timeline
* order. </p>
*
* <p>The natural order is consistent with {@code equals()}. </p>
*
* @see #isBefore(PlainTime)
* @see #isAfter(PlainTime)
*/
/*[deutsch]
* <p>Definiert eine natürliche Ordnung, die auf der zeitlichen
* Position basiert. </p>
*
* <p>Der Vergleich ist konsistent mit {@code equals()}. </p>
*
* @see #isBefore(PlainTime)
* @see #isAfter(PlainTime)
*/
@Override
public int compareTo(PlainTime time) {
int delta = this.hour - time.hour;
if (delta == 0) {
delta = this.minute - time.minute;
if (delta == 0) {
delta = this.second - time.second;
if (delta == 0) {
delta = this.nano - time.nano;
}
}
}
return ((delta < 0) ? -1 : ((delta == 0) ? 0 : 1));
}
/**
* <p>Dependent on the precision of this instance, this method yields a
* canonical representation in one of following formats (CLDR-syntax): </p>
*
* <ul>
* <li>'T'HH</li>
* <li>'T'HH:mm</li>
* <li>'T'HH:mm:ss</li>
* <li>'T'HH:mm:ss,SSS</li>
* <li>'T'HH:mm:ss,SSSSSS</li>
* <li>'T'HH:mm:ss,SSSSSSSSS</li>
* </ul>
*
* <p>The fraction part will be preceded by a comma as recommended by ISO
* unless the system property "net.time4j.format.iso.decimal.dot"
* was set to "true". </p>
*
* @return canonical ISO-8601-formatted string
*/
/*[deutsch]
* <p>Liefert je nach Genauigkeit einen String in einem der folgenden
* Formate (CLDR-Syntax): </p>
*
* <ul>
* <li>'T'HH</li>
* <li>'T'HH:mm</li>
* <li>'T'HH:mm:ss</li>
* <li>'T'HH:mm:ss,SSS</li>
* <li>'T'HH:mm:ss,SSSSSS</li>
* <li>'T'HH:mm:ss,SSSSSSSSS</li>
* </ul>
*
* <p>Vor dem Sekundenbruchteil erscheint im Standardfall das Komma, es sei
* denn, die System-Property "net.time4j.format.iso.decimal.dot"
* wurde auf "true" gesetzt. </p>
*
* @return canonical ISO-8601-formatted string
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder(19);
sb.append('T');
append2Digits(this.hour, sb);
if ((this.minute | this.second | this.nano) != 0) {
sb.append(':');
append2Digits(this.minute, sb);
if ((this.second | this.nano) != 0) {
sb.append(':');
append2Digits(this.second, sb);
if (this.nano != 0) {
sb.append(ISO_DECIMAL_SEPARATOR);
String num = Integer.toString(this.nano);
int len;
if ((this.nano % MIO) == 0) {
len = 3;
} else if ((this.nano % KILO) == 0) {
len = 6;
} else {
len = 9;
}
for (int i = num.length(); i < 9; i++) {
sb.append('0');
}
for (
int i = 0, n = Math.min(len, num.length());
i < n;
i++
) {
sb.append(num.charAt(i));
}
}
}
}
return sb.toString();
}
    /**
     * <p>Provides a static access to the associated time axis respective
     * chronology which contains the chronological rules. </p>
     *
     * @return  chronological system as time axis (never {@code null})
     */
    /*[deutsch]
     * <p>Liefert die zugehörige Zeitachse, die alle notwendigen
     * chronologischen Regeln enthält. </p>
     *
     * @return  chronological system as time axis (never {@code null})
     */
    public static TimeAxis<IsoTimeUnit, PlainTime> axis() {
        return ENGINE;
    }

    /**
     * @doctags.exclude
     */
    @Override
    protected TimeAxis<IsoTimeUnit, PlainTime> getChronology() {
        return ENGINE; // the single static time axis of this class
    }

    /**
     * @doctags.exclude
     */
    @Override
    protected PlainTime getContext() {
        return this;
    }
/**
* <p>Erzeugt eine neue Uhrzeit passend zur angegebenen absoluten Zeit. </p>
*
* @param ut unix time in seconds
* @param offset shift of local time relative to UTC
* @return new or cached wall time
*/
static PlainTime from(
UnixTime ut,
ZonalOffset offset
) {
long localSeconds = ut.getPosixTime() + offset.getIntegralAmount();
int localNanos = ut.getNanosecond() + offset.getFractionalAmount();
if (localNanos < 0) {
localNanos += MRD;
localSeconds--;
} else if (localNanos >= MRD) {
localNanos -= MRD;
localSeconds++;
}
int secondsOfDay = MathUtils.floorModulo(localSeconds, 86400);
int second = secondsOfDay % 60;
int minutesOfDay = secondsOfDay / 60;
int minute = minutesOfDay % 60;
int hour = minutesOfDay / 60;
return PlainTime.of(
hour,
minute,
second,
localNanos
);
}
/**
* <p>Dient der Serialisierungsunterstützung. </p>
*
* @param elementName name of element
* @return found element or {@code null}
*/
// optional
static Object lookupElement(String elementName) {
return ELEMENTS.get(elementName);
}
/**
* <p>Wird von SQL-TIMESTAMP gebraucht. </p>
*
* @param millisOfDay milliseconds of day
* @return new instance
*/
static PlainTime createFromMillis(int millisOfDay) {
return PlainTime.createFromMillis(millisOfDay, 0);
}
/**
* <p>Wird von der {@code ratio()}-Function des angegebenenElements
* aufgerufen. </p>
*
* @param element reference time element
* @return {@code true} if element maximum is reduced else {@code false}
*/
boolean hasReducedRange(ChronoElement<?> element) {
return (
((element == MILLI_OF_DAY) && ((this.nano % MIO) != 0))
|| ((element == ISO_HOUR) && !this.isFullHour())
|| ((element == MINUTE_OF_DAY) && !this.isFullMinute())
|| ((element == SECOND_OF_DAY) && (this.nano != 0))
|| ((element == MICRO_OF_DAY) && ((this.nano % KILO) != 0))
);
}
private static PlainTime of(
int hour,
int minute,
int second,
int nanosecond,
boolean validating
) {
if ((minute | second | nanosecond) == 0) {
if (validating) {
return PlainTime.of(hour);
} else {
return HOURS[hour];
}
}
return new PlainTime(hour, minute, second, nanosecond, validating);
}
private static void fill(
Map<String, Object> map,
ChronoElement<?> element
) {
map.put(element.name(), element);
}
private static void append2Digits(
int element,
StringBuilder sb
) {
if (element < 10) {
sb.append('0');
}
sb.append(element);
}
private static void checkHour(long hour) {
if (hour < 0 || hour > 24) {
throw new IllegalArgumentException(
"HOUR_OF_DAY out of range: " + hour);
}
}
private static void checkMinute(long minute) {
if (minute < 0 || minute > 59) {
throw new IllegalArgumentException(
"MINUTE_OF_HOUR out of range: " + minute);
}
}
private static void checkSecond(long second) {
if (second < 0 || second > 59) {
throw new IllegalArgumentException(
"SECOND_OF_MINUTE out of range: " + second);
}
}
private static void checkNano(int nano) {
if (nano < 0 || nano >= MRD) {
throw new IllegalArgumentException(
"NANO_OF_SECOND out of range: " + nano);
}
}
private static PlainTime createFromMillis(
int millisOfDay,
int micros
) {
int nanosecond = (millisOfDay % KILO) * MIO + micros;
int secondsOfDay = millisOfDay / KILO;
int second = secondsOfDay % 60;
int minutesOfDay = secondsOfDay / 60;
int minute = minutesOfDay % 60;
int hour = minutesOfDay / 60;
return PlainTime.of(hour, minute, second, nanosecond);
}
private static PlainTime createFromMicros(
long microsOfDay,
int nanos
) {
int nanosecond = ((int) (microsOfDay % MIO)) * KILO + nanos;
int secondsOfDay = (int) (microsOfDay / MIO);
int second = secondsOfDay % 60;
int minutesOfDay = secondsOfDay / 60;
int minute = minutesOfDay % 60;
int hour = minutesOfDay / 60;
return PlainTime.of(hour, minute, second, nanosecond);
}
private static PlainTime createFromNanos(long nanosOfDay) {
int nanosecond = (int) (nanosOfDay % MRD);
int secondsOfDay = (int) (nanosOfDay / MRD);
int second = secondsOfDay % 60;
int minutesOfDay = secondsOfDay / 60;
int minute = minutesOfDay % 60;
int hour = minutesOfDay / 60;
return PlainTime.of(hour, minute, second, nanosecond);
}
private long getNanoOfDay() {
return (
this.nano
+ this.second * 1L * MRD
+ this.minute * 60L * MRD
+ this.hour * 3600L * MRD
);
}
private boolean isFullHour() {
return ((this.minute | this.second | this.nano) == 0);
}
private boolean isFullMinute() {
return ((this.second | this.nano) == 0);
}
private static void registerUnits(TimeAxis.Builder<IsoTimeUnit, PlainTime> builder) {
Set<ClockUnit> convertibles = EnumSet.allOf(ClockUnit.class);
for (ClockUnit unit : ClockUnit.values()) {
builder.appendUnit(
unit,
new ClockUnitRule(unit),
unit.getLength(),
convertibles);
}
}
private static long floorMod(
long value,
long divisor
) {
long num =
(value >= 0)
? (value / divisor)
: (((value + 1) / divisor) - 1);
return (value - divisor * num);
}
private static long floorDiv(
long value,
long divisor
) {
if (value >= 0) {
return (value / divisor);
} else {
return ((value + 1) / divisor) - 1;
}
}
    // strips time zone symbols ('z') from the back, middle and front of
    // JDK format patterns
    private static String removeZones(String pattern) {
        // embedded zone symbol preceded by a space (observed for en-CA)
        String s = pattern.replace(" z", ""); // for en-CA
        // drop a trailing run of 'z' together with preceding whitespace;
        // note: a pattern consisting only of 'z' chars stays unchanged
        // because the inner loop never finds a non-'z' predecessor
        if (s.charAt(s.length() - 1) == 'z') {
            for (int i = s.length() - 1; i > 0; i--) {
                if (s.charAt(i - 1) != 'z') {
                    s = s.substring(0, i).trim();
                    break;
                }
            }
        }
        // drop a leading run of 'z' together with following whitespace
        if (s.charAt(0) == 'z') {
            for (int i = 1; i < s.length(); i++) {
                if (s.charAt(i) != 'z') {
                    s = s.substring(i).trim();
                    break;
                }
            }
        }
        return s;
    }
    /**
     * @serialData  Uses <a href="../../serialized-form.html#net.time4j.SPX">
     *              a dedicated serialization form</a> as proxy. The layout
     *              is bit-compressed. The first byte contains within the
     *              four most significant bits the type id {@code 2}. Then
     *              the data bytes for hour, minute, second and nanosecond
     *              follow (in last case int instead of byte). Is the precision
     *              limited to seconds, minutes or hours then the last non-zero
     *              byte will be bit-inverted by the operator (~), and the
     *              following bytes will be left out. The hour byte however
     *              is always written.
     *
     *              Schematic algorithm:
     *
     * <pre>
     *      out.writeByte(2 << 4);
     *
     *      if (time.nano == 0) {
     *          if (time.second == 0) {
     *              if (time.minute == 0) {
     *                  out.writeByte(~time.hour);
     *              } else {
     *                  out.writeByte(time.hour);
     *                  out.writeByte(~time.minute);
     *              }
     *          } else {
     *              out.writeByte(time.hour);
     *              out.writeByte(time.minute);
     *              out.writeByte(~time.second);
     *          }
     *      } else {
     *          out.writeByte(time.hour);
     *          out.writeByte(time.minute);
     *          out.writeByte(time.second);
     *          out.writeInt(time.nano);
     *      }
     * </pre>
     *
     * @return  replacement object in serialization graph
     */
    private Object writeReplace() {
        // serialization proxy pattern: SPX encodes this instance compactly
        return new SPX(this, SPX.TIME_TYPE);
    }
    /**
     * @serialData  Blocks because a serialization proxy is required.
     * @param       in      object input stream
     * @throws      InvalidObjectException (always)
     */
    private void readObject(ObjectInputStream in)
        throws IOException {
        // prevents direct deserialization bypassing the SPX proxy
        throw new InvalidObjectException("Serialization proxy required.");
    }
//~ Innere Klassen ----------------------------------------------------
    /**
     * <p>Unit rule which implements adding/subtracting clock units to a
     * wall time and measuring the distance between two wall times in a
     * given clock unit. </p>
     */
    private static class ClockUnitRule
        implements UnitRule<PlainTime> {
        //~ Instanzvariablen ----------------------------------------------
        // the clock unit this rule is registered for
        private final ClockUnit unit;
        //~ Konstruktoren -------------------------------------------------
        private ClockUnitRule(ClockUnit unit) {
            super();
            this.unit = unit;
        }
        //~ Methoden ------------------------------------------------------
        // Adds the given amount of this rule's unit; adding zero returns
        // the context unchanged (so a possible 24:00 value is preserved).
        @Override
        public PlainTime addTo(
            PlainTime context,
            long amount
        ) {
            if (amount == 0) {
                return context;
            }
            return doAdd(PlainTime.class, this.unit, context, amount);
        }
        // Distance from start to end as count of this rule's unit,
        // derived from the nanosecond-of-day difference.
        @Override
        public long between(
            PlainTime start,
            PlainTime end
        ) {
            long delta = (end.getNanoOfDay() - start.getNanoOfDay());
            long factor;
            switch (this.unit) {
                case HOURS:
                    factor = MRD * 3600L;
                    break;
                case MINUTES:
                    factor = MRD * 60L;
                    break;
                case SECONDS:
                    factor = MRD;
                    break;
                case MILLIS:
                    factor = MIO;
                    break;
                case MICROS:
                    factor = KILO;
                    break;
                case NANOS:
                    factor = 1;
                    break;
                default:
                    throw new UnsupportedOperationException(this.unit.name());
            }
            return delta / factor;
        }
        // Like addTo(), but the result also carries the day overflow
        // (wrapped in DayCycles).
        private static DayCycles addToWithOverflow(
            PlainTime context,
            long amount,
            ClockUnit unit
        ) {
            if ((amount == 0) && (context.hour < 24)) {
                return new DayCycles(0, context);
            }
            return doAdd(DayCycles.class, unit, context, amount);
        }
        // Core addition: propagates carries nano -> second -> minute ->
        // hour; MILLIS and MICROS delegate to the NANOS case. The
        // returnType parameter selects between a plain wall time result
        // and a DayCycles result including the day overflow.
        private static <R> R doAdd(
            Class<R> returnType,
            ClockUnit unit,
            PlainTime context,
            long amount
        ) {
            long hours;
            long minutes;
            long seconds;
            long nanos;
            int minute = context.minute;
            int second = context.second;
            int fraction = context.nano;
            switch (unit) {
                case HOURS:
                    hours = MathUtils.safeAdd(context.hour, amount);
                    break;
                case MINUTES:
                    minutes = MathUtils.safeAdd(context.minute, amount);
                    hours =
                        MathUtils.safeAdd(
                            context.hour,
                            MathUtils.floorDivide(minutes, 60));
                    minute = MathUtils.floorModulo(minutes, 60);
                    break;
                case SECONDS:
                    seconds = MathUtils.safeAdd(context.second, amount);
                    minutes =
                        MathUtils.safeAdd(
                            context.minute,
                            MathUtils.floorDivide(seconds, 60));
                    hours =
                        MathUtils.safeAdd(
                            context.hour,
                            MathUtils.floorDivide(minutes, 60));
                    minute = MathUtils.floorModulo(minutes, 60);
                    second = MathUtils.floorModulo(seconds, 60);
                    break;
                case MILLIS:
                    return doAdd(
                        returnType,
                        ClockUnit.NANOS,
                        context,
                        MathUtils.safeMultiply(amount, MIO));
                case MICROS:
                    return doAdd(
                        returnType,
                        ClockUnit.NANOS,
                        context,
                        MathUtils.safeMultiply(amount, KILO));
                case NANOS:
                    nanos =
                        MathUtils.safeAdd(context.nano, amount);
                    seconds =
                        MathUtils.safeAdd(
                            context.second,
                            MathUtils.floorDivide(nanos, MRD));
                    minutes =
                        MathUtils.safeAdd(
                            context.minute,
                            MathUtils.floorDivide(seconds, 60));
                    hours =
                        MathUtils.safeAdd(
                            context.hour,
                            MathUtils.floorDivide(minutes, 60));
                    minute = MathUtils.floorModulo(minutes, 60);
                    second = MathUtils.floorModulo(seconds, 60);
                    fraction = MathUtils.floorModulo(nanos, MRD);
                    break;
                default:
                    throw new UnsupportedOperationException(unit.name());
            }
            int hour = MathUtils.floorModulo(hours, 24);
            PlainTime time;
            if ((hour | minute | second | fraction) == 0) { // midnight
                // positive amounts ending exactly on midnight yield
                // PlainTime.MAX for the pure wall-time result, else MIN
                time = (
                    ((amount > 0) && (returnType == PlainTime.class))
                    ? PlainTime.MAX
                    : PlainTime.MIN);
            } else {
                time = PlainTime.of(hour, minute, second, fraction);
            }
            if (returnType == PlainTime.class) {
                return returnType.cast(time);
            } else {
                long cycles = MathUtils.floorDivide(hours, 24);
                return returnType.cast(new DayCycles(cycles, time));
            }
        }
    }
private static class TimeRule
implements ElementRule<PlainTime, PlainTime> {
//~ Methoden ------------------------------------------------------
@Override
public PlainTime getValue(PlainTime context) {
return context;
}
@Override
public PlainTime withValue(
PlainTime context,
PlainTime value,
boolean lenient
) {
if (value == null) {
throw new NullPointerException("Missing time value.");
}
return value;
}
@Override
public boolean isValid(
PlainTime context,
PlainTime value
) {
return (value != null);
}
@Override
public PlainTime getMinimum(PlainTime context) {
return PlainTime.MIN;
}
@Override
public PlainTime getMaximum(PlainTime context) {
return PlainTime.MAX;
}
@Override
public ChronoElement<?> getChildAtFloor(PlainTime context) {
return null;
}
@Override
public ChronoElement<?> getChildAtCeiling(PlainTime context) {
return null;
}
}
    /**
     * <p>Element rule for the precision element: reading yields the finest
     * clock unit with a non-zero value, writing truncates the wall time
     * to the given precision. </p>
     */
    private static class PrecisionRule
        implements ElementRule<PlainTime, ClockUnit> {
        //~ Methoden ------------------------------------------------------
        // Finest unit with a non-zero value (HOURS for a full hour).
        @Override
        public ClockUnit getValue(PlainTime context) {
            if (context.nano != 0) {
                if ((context.nano % MIO) == 0) {
                    return ClockUnit.MILLIS;
                } else if ((context.nano % KILO) == 0) {
                    return ClockUnit.MICROS;
                } else {
                    return ClockUnit.NANOS;
                }
            } else if (context.second != 0) {
                return ClockUnit.SECONDS;
            } else if (context.minute != 0) {
                return ClockUnit.MINUTES;
            } else {
                return ClockUnit.HOURS;
            }
        }
        // Truncates the context to the given precision; a finer or equal
        // precision leaves the context unchanged.
        @Override
        public PlainTime withValue(
            PlainTime context,
            ClockUnit value,
            boolean lenient
        ) {
            int ordinal = value.ordinal();
            if (ordinal >= this.getValue(context).ordinal()) {
                return context; // no truncation necessary
            }
            switch (value) {
                case HOURS:
                    return PlainTime.of(context.hour);
                case MINUTES:
                    return PlainTime.of(context.hour, context.minute);
                case SECONDS:
                    return PlainTime.of(
                        context.hour, context.minute, context.second);
                case MILLIS:
                    return PlainTime.of(
                        context.hour,
                        context.minute,
                        context.second,
                        (context.nano / MIO) * MIO);
                case MICROS:
                    return PlainTime.of(
                        context.hour,
                        context.minute,
                        context.second,
                        (context.nano / KILO) * KILO);
                case NANOS:
                    return context; // should never be reached
                default:
                    throw new UnsupportedOperationException(value.name());
            }
        }
        @Override
        public boolean isValid(
            PlainTime context,
            ClockUnit value
        ) {
            return (value != null);
        }
        @Override
        public ClockUnit getMinimum(PlainTime context) {
            return ClockUnit.HOURS;
        }
        @Override
        public ClockUnit getMaximum(PlainTime context) {
            return ClockUnit.NANOS;
        }
        // the precision element has no chronological children
        @Override
        public ChronoElement<?> getChildAtFloor(PlainTime context) {
            return null;
        }
        @Override
        public ChronoElement<?> getChildAtCeiling(PlainTime context) {
            return null;
        }
    }
private static class MeridiemRule
implements ElementRule<PlainTime, Meridiem> {
//~ Methoden ------------------------------------------------------
@Override
public Meridiem getValue(PlainTime context) {
return Meridiem.ofHour(context.hour);
}
@Override
public PlainTime withValue(
PlainTime context,
Meridiem value,
boolean lenient
) {
int h = ((context.hour == 24) ? 0 : context.hour);
if (value == null) {
throw new NullPointerException("Missing am/pm-value.");
} else if (value == Meridiem.AM) {
if (h >= 12) {
h -= 12;
}
} else if (value == Meridiem.PM) {
if (h < 12) {
h += 12;
}
}
return PlainTime.of(
h,
context.minute,
context.second,
context.nano
);
}
@Override
public boolean isValid(
PlainTime context,
Meridiem value
) {
return (value != null);
}
@Override
public Meridiem getMinimum(PlainTime context) {
return Meridiem.AM;
}
@Override
public Meridiem getMaximum(PlainTime context) {
return Meridiem.PM;
}
@Override
public ChronoElement<?> getChildAtFloor(PlainTime context) {
return DIGITAL_HOUR_OF_AMPM;
}
@Override
public ChronoElement<?> getChildAtCeiling(PlainTime context) {
return DIGITAL_HOUR_OF_AMPM;
}
}
    /**
     * <p>Element rule for all int-valued clock elements; the concrete
     * element is identified via an index constant declared in
     * {@code IntegerTimeElement} (index -1 for foreign elements). </p>
     */
    private static class IntegerElementRule
        implements ElementRule<PlainTime, Integer> {
        //~ Instanzvariablen ----------------------------------------------
        // underlying element
        private final ChronoElement<Integer> element;
        // index constant of IntegerTimeElement, or -1
        private final int index;
        // smallest allowed value
        private final int min;
        // largest allowed value
        private final int max;
        //~ Konstruktoren -------------------------------------------------
        IntegerElementRule(
            ChronoElement<Integer> element,
            int min,
            int max
        ) {
            super();
            this.element = element;
            if (element instanceof IntegerTimeElement) {
                this.index = ((IntegerTimeElement) element).getIndex();
            } else {
                this.index = -1;
            }
            this.min = min;
            this.max = max;
        }
        //~ Methoden ------------------------------------------------------
        // Derives the element value from the wall time fields.
        @Override
        public Integer getValue(PlainTime context) {
            int ret;
            switch (this.index) {
                case IntegerTimeElement.CLOCK_HOUR_OF_AMPM:
                    ret = (context.hour % 12);
                    if (ret == 0) {
                        ret = 12;
                    }
                    break;
                case IntegerTimeElement.CLOCK_HOUR_OF_DAY:
                    ret = context.hour % 24;
                    if (ret == 0) {
                        ret = 24;
                    }
                    break;
                case IntegerTimeElement.DIGITAL_HOUR_OF_AMPM:
                    ret = (context.hour % 12);
                    break;
                case IntegerTimeElement.DIGITAL_HOUR_OF_DAY:
                    ret = context.hour % 24;
                    break;
                case IntegerTimeElement.ISO_HOUR:
                    ret = context.hour;
                    break;
                case IntegerTimeElement.MINUTE_OF_HOUR:
                    ret = context.minute;
                    break;
                case IntegerTimeElement.MINUTE_OF_DAY:
                    ret = context.hour * 60 + context.minute;
                    break;
                case IntegerTimeElement.SECOND_OF_MINUTE:
                    ret = context.second;
                    break;
                case IntegerTimeElement.SECOND_OF_DAY:
                    ret =
                        context.hour * 3600
                        + context.minute * 60
                        + context.second;
                    break;
                case IntegerTimeElement.MILLI_OF_SECOND:
                    ret = (context.nano / MIO);
                    break;
                case IntegerTimeElement.MICRO_OF_SECOND:
                    ret = (context.nano / KILO);
                    break;
                case IntegerTimeElement.NANO_OF_SECOND:
                    ret = context.nano;
                    break;
                case IntegerTimeElement.MILLI_OF_DAY:
                    ret = (int) (context.getNanoOfDay() / MIO);
                    break;
                default:
                    throw new UnsupportedOperationException(
                        this.element.name());
            }
            return Integer.valueOf(ret);
        }
        // Sets the element value; lenient mode delegates to element-wise
        // delta arithmetic, strict mode validates first.
        @Override
        public PlainTime withValue(
            PlainTime context,
            Integer value,
            boolean lenient
        ) {
            if (value == null) {
                throw new NullPointerException("Missing element value.");
            } else if (lenient) {
                return this.withValueInLenientMode(context, value.intValue());
            } else if (!this.isValid(context, value)) {
                throw new IllegalArgumentException(
                    "Value out of range: " + value);
            }
            int h = context.hour;
            int m = context.minute;
            int s = context.second;
            int f = context.nano;
            int v = value.intValue();
            switch (this.index) {
                case IntegerTimeElement.CLOCK_HOUR_OF_AMPM:
                    v = ((v == 12) ? 0 : v);
                    h = (isAM(context) ? v : (v + 12));
                    break;
                case IntegerTimeElement.CLOCK_HOUR_OF_DAY:
                    h = ((v == 24) ? 0 : v);
                    break;
                case IntegerTimeElement.DIGITAL_HOUR_OF_AMPM:
                    h = (isAM(context) ? v : (v + 12));
                    break;
                case IntegerTimeElement.DIGITAL_HOUR_OF_DAY:
                    h = v;
                    break;
                case IntegerTimeElement.ISO_HOUR:
                    h = v;
                    break;
                case IntegerTimeElement.MINUTE_OF_HOUR:
                    m = v;
                    break;
                case IntegerTimeElement.MINUTE_OF_DAY:
                    h = v / 60;
                    m = v % 60;
                    break;
                case IntegerTimeElement.SECOND_OF_MINUTE:
                    s = v;
                    break;
                case IntegerTimeElement.SECOND_OF_DAY:
                    h = v / 3600;
                    int remainder = v % 3600;
                    m = remainder / 60;
                    s = remainder % 60;
                    break;
                case IntegerTimeElement.MILLI_OF_SECOND:
                    // keep the sub-millisecond part of the nano value
                    f = v * MIO + (context.nano % MIO);
                    break;
                case IntegerTimeElement.MICRO_OF_SECOND:
                    // keep the sub-microsecond part of the nano value
                    f = v * KILO + (context.nano % KILO);
                    break;
                case IntegerTimeElement.NANO_OF_SECOND:
                    f = v;
                    break;
                case IntegerTimeElement.MILLI_OF_DAY:
                    return PlainTime.createFromMillis(v, context.nano % MIO);
                default:
                    throw new UnsupportedOperationException(
                        this.element.name());
            }
            return PlainTime.of(h, m, s, f);
        }
        // Validation respects the reduced maximum of day-spanning elements
        // and the special value 24:00 where only zero sub-parts are valid.
        @Override
        public boolean isValid(
            PlainTime context,
            Integer value
        ) {
            if (value == null) {
                return false;
            }
            int v = value.intValue();
            if ((v < this.min) || (v > this.max)) {
                return false;
            }
            if (v == this.max) {
                switch (this.index) {
                    case IntegerTimeElement.ISO_HOUR:
                        return context.isFullHour();
                    case IntegerTimeElement.MINUTE_OF_DAY:
                        return context.isFullMinute();
                    case IntegerTimeElement.SECOND_OF_DAY:
                        return (context.nano == 0);
                    case IntegerTimeElement.MILLI_OF_DAY:
                        return ((context.nano % MIO) == 0);
                    default:
                        // no-op
                }
            }
            if (context.hour == 24) {
                switch (this.index) {
                    case IntegerTimeElement.MINUTE_OF_HOUR:
                    case IntegerTimeElement.SECOND_OF_MINUTE:
                    case IntegerTimeElement.MILLI_OF_SECOND:
                    case IntegerTimeElement.MICRO_OF_SECOND:
                    case IntegerTimeElement.NANO_OF_SECOND:
                        return (v == 0);
                    default:
                        // no-op
                }
            }
            return true;
        }
        @Override
        public Integer getMinimum(PlainTime context) {
            return Integer.valueOf(this.min);
        }
        // The context-dependent maximum: sub-parts of 24:00 are fixed to
        // zero, and day-spanning elements may have a reduced ceiling.
        @Override
        public Integer getMaximum(PlainTime context) {
            if (context.hour == 24) {
                switch (this.index) {
                    case IntegerTimeElement.MINUTE_OF_HOUR:
                    case IntegerTimeElement.SECOND_OF_MINUTE:
                    case IntegerTimeElement.MILLI_OF_SECOND:
                    case IntegerTimeElement.MICRO_OF_SECOND:
                    case IntegerTimeElement.NANO_OF_SECOND:
                        return Integer.valueOf(0);
                    default:
                        // no-op
                }
            }
            if (context.hasReducedRange(this.element)) {
                return Integer.valueOf(this.max - 1);
            }
            return Integer.valueOf(this.max);
        }
        @Override
        public ChronoElement<?> getChildAtFloor(PlainTime context) {
            return this.getChild(context);
        }
        @Override
        public ChronoElement<?> getChildAtCeiling(PlainTime context) {
            return this.getChild(context);
        }
        // Next finer element in the hour-minute-second hierarchy.
        private ChronoElement<?> getChild(PlainTime context) {
            switch (this.index) {
                case IntegerTimeElement.CLOCK_HOUR_OF_AMPM:
                case IntegerTimeElement.CLOCK_HOUR_OF_DAY:
                case IntegerTimeElement.DIGITAL_HOUR_OF_AMPM:
                case IntegerTimeElement.DIGITAL_HOUR_OF_DAY:
                case IntegerTimeElement.ISO_HOUR:
                    return MINUTE_OF_HOUR;
                case IntegerTimeElement.MINUTE_OF_HOUR:
                case IntegerTimeElement.MINUTE_OF_DAY:
                    return SECOND_OF_MINUTE;
                case IntegerTimeElement.SECOND_OF_MINUTE:
                case IntegerTimeElement.SECOND_OF_DAY:
                    return NANO_OF_SECOND;
                default:
                    return null;
            }
        }
        // Lenient mode: apply the difference to the current value as a
        // unit-based addition (wrapping around midnight where applicable).
        private PlainTime withValueInLenientMode(
            PlainTime context,
            int value
        ) {
            if (
                (this.element == ISO_HOUR)
                || (this.element == DIGITAL_HOUR_OF_DAY)
                || (this.element == DIGITAL_HOUR_OF_AMPM)
            ) {
                return context.plus(
                    MathUtils.safeSubtract(value, context.get(this.element)),
                    ClockUnit.HOURS);
            } else if (this.element == MINUTE_OF_HOUR) {
                return context.plus(
                    MathUtils.safeSubtract(value, context.minute),
                    ClockUnit.MINUTES);
            } else if (this.element == SECOND_OF_MINUTE) {
                return context.plus(
                    MathUtils.safeSubtract(value, context.second),
                    ClockUnit.SECONDS);
            } else if (this.element == MILLI_OF_SECOND) {
                return context.plus(
                    MathUtils.safeSubtract(
                        value, context.get(MILLI_OF_SECOND)),
                    ClockUnit.MILLIS);
            } else if (this.element == MICRO_OF_SECOND) {
                return context.plus(
                    MathUtils.safeSubtract(
                        value, context.get(MICRO_OF_SECOND)),
                    ClockUnit.MICROS);
            } else if (this.element == NANO_OF_SECOND) {
                return context.plus(
                    MathUtils.safeSubtract(value, context.nano),
                    ClockUnit.NANOS);
            } else if (this.element == MILLI_OF_DAY) {
                int remainder1 = MathUtils.floorModulo(value, 86400 * KILO);
                int remainder2 = context.nano % MIO;
                if ((remainder1 == 0) && (remainder2 == 0)) {
                    return (value > 0) ? PlainTime.MAX : PlainTime.MIN;
                } else {
                    return PlainTime.createFromMillis(remainder1, remainder2);
                }
            } else if (this.element == MINUTE_OF_DAY) {
                int remainder = MathUtils.floorModulo(value, 1440);
                if ((remainder == 0) && context.isFullMinute()) {
                    return (value > 0) ? PlainTime.MAX : PlainTime.MIN;
                } else {
                    return this.withValue(
                        context, Integer.valueOf(remainder), false);
                }
            } else if (this.element == SECOND_OF_DAY) {
                int remainder = MathUtils.floorModulo(value, 86400);
                if ((remainder == 0) && (context.nano == 0)) {
                    return (value > 0) ? PlainTime.MAX : PlainTime.MIN;
                } else {
                    return this.withValue(
                        context, Integer.valueOf(remainder), false);
                }
            } else {
                throw new UnsupportedOperationException(this.element.name());
            }
        }
        // 24:00 counts as AM (it is treated like midnight).
        private static boolean isAM(PlainTime context) {
            return ((context.hour < 12) || (context.hour == 24));
        }
    }
    /**
     * <p>Element rule for the long-valued elements MICRO_OF_DAY and
     * NANO_OF_DAY. </p>
     */
    private static class LongElementRule
        implements ElementRule<PlainTime, Long> {
        //~ Instanzvariablen ----------------------------------------------
        // the element handled by this rule (MICRO_OF_DAY or NANO_OF_DAY)
        private final ChronoElement<Long> element;
        // smallest allowed value
        private final long min;
        // largest allowed value
        private final long max;
        //~ Konstruktoren -------------------------------------------------
        LongElementRule(
            ChronoElement<Long> element,
            long min,
            long max
        ) {
            super();
            this.element = element;
            this.min = min;
            this.max = max;
        }
        //~ Methoden ------------------------------------------------------
        // Derives the element value from the nanosecond-of-day.
        @Override
        public Long getValue(PlainTime context) {
            long ret;
            if (this.element == MICRO_OF_DAY) {
                ret = (context.getNanoOfDay() / KILO);
            } else { // NANO_OF_DAY
                ret = context.getNanoOfDay();
            }
            return Long.valueOf(ret);
        }
        // Sets the element value; in case of MICRO_OF_DAY the
        // sub-microsecond rest of the context is preserved.
        @Override
        public PlainTime withValue(
            PlainTime context,
            Long value,
            boolean lenient
        ) {
            if (value == null) {
                throw new NullPointerException("Missing element value.");
            } else if (lenient) {
                return this.withValueInLenientMode(context, value.longValue());
            } else if (!this.isValid(context, value)) {
                throw new IllegalArgumentException(
                    "Value out of range: " + value);
            }
            long v = value.longValue();
            if (this.element == MICRO_OF_DAY) {
                return PlainTime.createFromMicros(v, context.nano % KILO);
            } else { // NANO_OF_DAY
                return PlainTime.createFromNanos(v);
            }
        }
        // The maximum of MICRO_OF_DAY is only reachable if the context
        // has no sub-microsecond part.
        @Override
        public boolean isValid(
            PlainTime context,
            Long value
        ) {
            if (value == null) {
                return false;
            } else if (
                (this.element == MICRO_OF_DAY)
                && (value.longValue() == this.max)
            ) {
                return ((context.nano % KILO) == 0);
            } else {
                return (
                    (this.min <= value.longValue())
                    && (value.longValue() <= this.max)
                );
            }
        }
        @Override
        public Long getMinimum(PlainTime context) {
            return Long.valueOf(this.min);
        }
        @Override
        public Long getMaximum(PlainTime context) {
            if (
                (this.element == MICRO_OF_DAY)
                && ((context.nano % KILO) != 0)
            ) {
                return Long.valueOf(this.max - 1);
            }
            return Long.valueOf(this.max);
        }
        @Override
        public ChronoElement<?> getChildAtFloor(PlainTime context) {
            return null;
        }
        @Override
        public ChronoElement<?> getChildAtCeiling(PlainTime context) {
            return null;
        }
        // Lenient mode: wrap around the day length; a positive multiple
        // of a full day maps to PlainTime.MAX (24:00).
        private PlainTime withValueInLenientMode(
            PlainTime context,
            long value
        ) {
            if (this.element == MICRO_OF_DAY) {
                long remainder1 = floorMod(value, 86400L * MIO);
                int remainder2 = context.nano % KILO;
                return
                    ((remainder1 == 0) && (remainder2 == 0) && (value > 0))
                    ? PlainTime.MAX
                    : PlainTime.createFromMicros(remainder1, remainder2);
            } else { // NANO_OF_DAY
                long remainder = floorMod(value, 86400L * MRD);
                return
                    ((remainder == 0) && (value > 0))
                    ? PlainTime.MAX
                    : PlainTime.createFromNanos(remainder);
            }
        }
    }
    /**
     * <p>Element rule for the decimal elements DECIMAL_HOUR, DECIMAL_MINUTE
     * and DECIMAL_SECOND which map the wall time to fractional
     * {@code BigDecimal} values and back. </p>
     */
    private static class BigDecimalElementRule
        implements ElementRule<PlainTime, BigDecimal> {
        //~ Instanzvariablen ----------------------------------------------
        // the decimal element handled by this rule
        private final ChronoElement<BigDecimal> element;
        // largest allowed decimal value
        private final BigDecimal max;
        //~ Konstruktoren -------------------------------------------------
        BigDecimalElementRule(
            ChronoElement<BigDecimal> element,
            BigDecimal max
        ) {
            super();
            this.element = element;
            this.max = max;
        }
        //~ Methoden ------------------------------------------------------
        // Converts the relevant time fields into a decimal value with a
        // scale of 15, stripping trailing zeros.
        @Override
        public BigDecimal getValue(PlainTime context) {
            BigDecimal val;
            if (this.element == DECIMAL_HOUR) {
                if (context.equals(PlainTime.MIN)) {
                    return BigDecimal.ZERO;
                } else if (context.hour == 24) {
                    return DECIMAL_24_0;
                }
                val =
                    BigDecimal.valueOf(context.hour)
                        .add(div(BigDecimal.valueOf(context.minute), DECIMAL_60))
                        .add(div(BigDecimal.valueOf(context.second), DECIMAL_3600))
                        .add(
                            div(
                                BigDecimal.valueOf(context.nano),
                                DECIMAL_3600.multiply(DECIMAL_MRD)));
            } else if (this.element == DECIMAL_MINUTE) {
                if (context.isFullHour()) {
                    return BigDecimal.ZERO;
                }
                val =
                    BigDecimal.valueOf(context.minute)
                        .add(div(BigDecimal.valueOf(context.second), DECIMAL_60))
                        .add(
                            div(
                                BigDecimal.valueOf(context.nano),
                                DECIMAL_60.multiply(DECIMAL_MRD)));
            } else if (this.element == DECIMAL_SECOND) {
                if (context.isFullMinute()) {
                    return BigDecimal.ZERO;
                }
                val =
                    BigDecimal.valueOf(context.second)
                        .add(div(BigDecimal.valueOf(context.nano), DECIMAL_MRD));
            } else {
                throw new UnsupportedOperationException(this.element.name());
            }
            return val.setScale(15, RoundingMode.FLOOR).stripTrailingZeros();
        }
        // Decomposes the decimal value into integral hour/minute/second
        // parts plus a nano fraction; lenient mode carries overflows into
        // the coarser fields.
        @Override
        public PlainTime withValue(
            PlainTime context,
            BigDecimal value,
            boolean lenient
        ) {
            BigDecimal bd = value;
            int h, m, s, f;
            long hv;
            if (this.element == DECIMAL_HOUR) {
                BigDecimal intH = bd.setScale(0, RoundingMode.FLOOR);
                BigDecimal fractionalM = bd.subtract(intH).multiply(DECIMAL_60);
                BigDecimal intM = fractionalM.setScale(0, RoundingMode.FLOOR);
                BigDecimal fractionalS =
                    fractionalM.subtract(intM).multiply(DECIMAL_60);
                BigDecimal intS = fractionalS.setScale(0, RoundingMode.FLOOR);
                hv = intH.longValueExact();
                m = intM.intValue();
                s = intS.intValue();
                f = toNano(fractionalS.subtract(intS));
            } else if (this.element == DECIMAL_MINUTE) {
                BigDecimal totalM = bd.setScale(0, RoundingMode.FLOOR);
                BigDecimal fractionalS =
                    bd.subtract(totalM).multiply(DECIMAL_60);
                BigDecimal intS = fractionalS.setScale(0, RoundingMode.FLOOR);
                s = intS.intValue();
                f = toNano(fractionalS.subtract(intS));
                long minutes = totalM.longValueExact();
                hv = context.hour;
                if (lenient) {
                    hv += MathUtils.floorDivide(minutes, 60);
                    m = MathUtils.floorModulo(minutes, 60);
                } else {
                    checkMinute(minutes);
                    m = (int) minutes;
                }
            } else if (this.element == DECIMAL_SECOND) {
                BigDecimal totalS = bd.setScale(0, RoundingMode.FLOOR);
                f = toNano(bd.subtract(totalS));
                long seconds = totalS.longValueExact();
                hv = context.hour;
                m = context.minute;
                if (lenient) {
                    s = MathUtils.floorModulo(seconds, 60);
                    long minutes = m + MathUtils.floorDivide(seconds, 60);
                    hv += MathUtils.floorDivide(minutes, 60);
                    m = MathUtils.floorModulo(minutes, 60);
                } else {
                    checkSecond(seconds);
                    s = (int) seconds;
                }
            } else {
                throw new UnsupportedOperationException(this.element.name());
            }
            if (lenient) {
                h = MathUtils.floorModulo(hv, 24);
                // a positive full-day wrap maps to 24:00
                if ((hv > 0) && ((h | m | s | f) == 0)) {
                    return PlainTime.MAX;
                }
            } else if (hv < 0 || hv > 24) {
                throw new IllegalArgumentException(
                    "Value out of range: " + value);
            } else {
                h = (int) hv;
            }
            return PlainTime.of(h, m, s, f);
        }
        // At 24:00 only zero is a valid minute/second fraction.
        @Override
        public boolean isValid(
            PlainTime context,
            BigDecimal value
        ) {
            if (value == null) {
                return false;
            }
            if (context.hour == 24) {
                if (
                    (this.element == DECIMAL_MINUTE)
                    || (this.element == DECIMAL_SECOND)
                ) {
                    return (BigDecimal.ZERO.compareTo(value) == 0);
                }
            }
            return (
                (BigDecimal.ZERO.compareTo(value) <= 0)
                && (this.max.compareTo(value) >= 0)
            );
        }
        @Override
        public BigDecimal getMinimum(PlainTime context) {
            return BigDecimal.ZERO;
        }
        @Override
        public BigDecimal getMaximum(PlainTime context) {
            if (context.hour == 24) {
                if (
                    (this.element == DECIMAL_MINUTE)
                    || (this.element == DECIMAL_SECOND)
                ) {
                    return BigDecimal.ZERO;
                }
            }
            return this.max;
        }
        @Override
        public ChronoElement<?> getChildAtFloor(PlainTime context) {
            return null; // never called
        }
        @Override
        public ChronoElement<?> getChildAtCeiling(PlainTime context) {
            return null; // never called
        }
        // Division with fixed scale 16, rounding towards the floor.
        private static BigDecimal div(
            BigDecimal value,
            BigDecimal factor
        ) {
            return value.divide(factor, 16, RoundingMode.FLOOR);
        }
        private static int toNano(BigDecimal fractionOfSecond) {
            // the decimal value is almost always slightly too small
            // => rounding up (HALF_UP) is necessary
            BigDecimal result =
                fractionOfSecond.movePointRight(9).setScale(
                    0,
                    RoundingMode.HALF_UP);
            return Math.min(MRD - 1, result.intValue());
        }
    }
private static class Merger
implements ChronoMerger<PlainTime> {
//~ Methoden ------------------------------------------------------
@Override
public PlainTime createFrom(
TimeSource<?> clock,
final AttributeQuery attributes
) {
Timezone zone;
if (attributes.contains(Attributes.TIMEZONE_ID)) {
zone = Timezone.of(attributes.get(Attributes.TIMEZONE_ID));
} else {
zone = Timezone.ofSystem();
}
final UnixTime ut = clock.currentTime();
return PlainTime.from(ut, zone.getOffset(ut));
}
// Löst bevorzugt Elemente auf, die in Format-Patterns vorkommen
@Override
public PlainTime createFrom(
ChronoEntity<?> entity,
AttributeQuery attributes,
boolean preparsing
) {
if (entity instanceof UnixTime) {
return PlainTimestamp.axis()
.createFrom(entity, attributes, preparsing).getWallTime();
}
// Uhrzeit bereits vorhanden? -------------------------------------
if (entity.contains(WALL_TIME)) {
return entity.get(WALL_TIME);
}
// Stundenteil ----------------------------------------------------
if (entity.contains(DECIMAL_HOUR)) {
return PlainTime.of(entity.get(DECIMAL_HOUR));
}
Leniency leniency =
attributes.get(Attributes.LENIENCY, Leniency.SMART);
int hour = 0;
if (entity.contains(ISO_HOUR)) {
hour = entity.get(ISO_HOUR).intValue();
} else {
Integer h = readHour(entity);
if (h == null) {
return readSpecialCases(entity);
}
hour = h.intValue();
if (
(hour == 24)
&& !leniency.isLax()
) {
flagValidationError(
entity,
"Time 24:00 not allowed, "
+ "use lax mode or element ISO_HOUR instead.");
return null;
}
}
// Minutenteil ----------------------------------------------------
if (entity.contains(DECIMAL_MINUTE)) {
return M_DECIMAL_RULE.withValue(
PlainTime.of(hour),
entity.get(DECIMAL_MINUTE),
false
);
}
int minute = 0;
if (entity.contains(MINUTE_OF_HOUR)) {
minute = entity.get(MINUTE_OF_HOUR).intValue();
}
// Sekundenteil ---------------------------------------------------
if (entity.contains(DECIMAL_SECOND)) {
return S_DECIMAL_RULE.withValue(
PlainTime.of(hour, minute),
entity.get(DECIMAL_SECOND),
false
);
}
int second = 0;
if (entity.contains(SECOND_OF_MINUTE)) {
second = entity.get(SECOND_OF_MINUTE).intValue();
}
// Nanoteil -------------------------------------------------------
int nanosecond = 0;
if (entity.contains(NANO_OF_SECOND)) {
nanosecond = entity.get(NANO_OF_SECOND).intValue();
} else if (entity.contains(MICRO_OF_SECOND)) {
nanosecond = entity.get(MICRO_OF_SECOND).intValue() * KILO;
} else if (entity.contains(MILLI_OF_SECOND)) {
nanosecond = entity.get(MILLI_OF_SECOND).intValue() * MIO;
}
// Ergebnis aus Stunde, Minute, Sekunde und Nano ------------------
if (leniency.isLax()) {
long total =
MathUtils.safeAdd(
MathUtils.safeMultiply(
MathUtils.safeAdd(
MathUtils.safeAdd(
MathUtils.safeMultiply(hour, 3600L),
MathUtils.safeMultiply(minute, 60L)),
second
),
MRD
),
nanosecond
);
long nanoOfDay = floorMod(total, 86400L * MRD);
long overflow = floorDiv(total, 86400L * MRD);
if (
(overflow != 0)
&& entity.isValid(LongElement.DAY_OVERFLOW, overflow)
) {
entity.with(LongElement.DAY_OVERFLOW, overflow);
}
if ((nanoOfDay == 0) && (overflow > 0)) {
return PlainTime.MAX;
} else {
return PlainTime.createFromNanos(nanoOfDay);
}
} else if (
(hour >= 0)
&& (minute >= 0)
&& (second >= 0)
&& (nanosecond >= 0)
&& (
((hour == 24) && (minute | second | nanosecond) == 0))
|| (
(hour < 24)
&& (minute <= 59)
&& (second <= 59)
&& (nanosecond <= MRD))
) {
return PlainTime.of(hour, minute, second, nanosecond, false);
} else {
flagValidationError(entity, "Time component out of range.");
return null;
}
}
private static Integer readHour(ChronoEntity<?> entity) {
int hour;
if (entity.contains(DIGITAL_HOUR_OF_DAY)) {
hour = entity.get(DIGITAL_HOUR_OF_DAY).intValue();
} else if (entity.contains(CLOCK_HOUR_OF_DAY)) {
hour = entity.get(CLOCK_HOUR_OF_DAY).intValue();
if (hour == 24) {
hour = 0;
}
} else if (entity.contains(AM_PM_OF_DAY)) {
Meridiem ampm = entity.get(AM_PM_OF_DAY);
if (entity.contains(DIGITAL_HOUR_OF_AMPM)) {
int h = entity.get(DIGITAL_HOUR_OF_AMPM).intValue();
hour = ((ampm == Meridiem.AM) ? h : h + 12);
} else if (
entity.contains(CLOCK_HOUR_OF_AMPM)
) {
int h = entity.get(CLOCK_HOUR_OF_AMPM).intValue();
if (h == 12) {
h = 0;
}
hour = ((ampm == Meridiem.AM) ? h : h + 12);
} else {
return null;
}
} else {
return null;
}
return Integer.valueOf(hour);
}
private static PlainTime readSpecialCases(ChronoEntity<?> entity) {
if (entity.contains(NANO_OF_DAY)) { // Threeten-Symbol N
long nanoOfDay = entity.get(NANO_OF_DAY).longValue();
if ((nanoOfDay < 0) || (nanoOfDay > 86400L * MRD)) {
flagValidationError(
entity,
"NANO_OF_DAY out of range: " + nanoOfDay);
return null;
}
return PlainTime.createFromNanos(nanoOfDay);
} else if (entity.contains(MICRO_OF_DAY)) {
int nanos = 0;
if (entity.contains(NANO_OF_SECOND)) {
nanos = entity.get(NANO_OF_SECOND).intValue() % KILO;
}
return PlainTime.createFromMicros(
entity.get(MICRO_OF_DAY).longValue(),
nanos
);
} else if (entity.contains(MILLI_OF_DAY)) { // CLDR-Symbol A
int submillis = 0;
if (entity.contains(NANO_OF_SECOND)) {
int nanoOfSecond = entity.get(NANO_OF_SECOND).intValue();
if ((nanoOfSecond < 0) || (nanoOfSecond >= MRD)) {
flagValidationError(
entity,
"NANO_OF_SECOND out of range: " + nanoOfSecond);
return null;
}
submillis = nanoOfSecond % MIO;
} else if (entity.contains(MICRO_OF_SECOND)) {
int microOfSecond = entity.get(MICRO_OF_SECOND).intValue();
if ((microOfSecond < 0) || (microOfSecond >= MIO)) {
flagValidationError(
entity,
"MICRO_OF_SECOND out of range: " + microOfSecond);
return null;
}
submillis = microOfSecond % KILO;
}
int milliOfDay = entity.get(MILLI_OF_DAY).intValue();
if ((milliOfDay < 0) || (milliOfDay > 86400 * KILO)) {
flagValidationError(
entity,
"MILLI_OF_DAY out of range: " + milliOfDay);
return null;
}
return PlainTime.createFromMillis(milliOfDay, submillis);
} else if (entity.contains(SECOND_OF_DAY)) {
int nanos = 0;
if (entity.contains(NANO_OF_SECOND)) {
nanos = entity.get(NANO_OF_SECOND).intValue();
} else if (entity.contains(MICRO_OF_SECOND)) {
nanos = entity.get(MICRO_OF_SECOND).intValue() * KILO;
} else if (entity.contains(MILLI_OF_SECOND)) {
nanos = entity.get(MILLI_OF_SECOND).intValue() * MIO;
}
return PlainTime.of(0, 0, 0, nanos).with(
SECOND_OF_DAY,
entity.get(SECOND_OF_DAY));
} else if (entity.contains(MINUTE_OF_DAY)) {
int nanos = 0;
if (entity.contains(NANO_OF_SECOND)) {
nanos = entity.get(NANO_OF_SECOND).intValue();
} else if (entity.contains(MICRO_OF_SECOND)) {
nanos = entity.get(MICRO_OF_SECOND).intValue() * KILO;
} else if (entity.contains(MILLI_OF_SECOND)) {
nanos = entity.get(MILLI_OF_SECOND).intValue() * MIO;
}
int secs = 0;
if (entity.contains(SECOND_OF_MINUTE)) {
secs = entity.get(SECOND_OF_MINUTE).intValue();
}
return PlainTime.of(0, 0, secs, nanos).with(
MINUTE_OF_DAY,
entity.get(MINUTE_OF_DAY));
}
return null;
}
private static void flagValidationError(
ChronoEntity<?> entity,
String message
) {
if (entity.isValid(ValidationElement.ERROR_MESSAGE, message)) {
entity.with(ValidationElement.ERROR_MESSAGE, message);
}
}
@Override
public ChronoDisplay preformat(
PlainTime context,
AttributeQuery attributes
) {
return context;
}
@Override
public Chronology<?> preparser() {
return null;
}
}
}
| remove unnecessary comment
| core/src/main/java/net/time4j/PlainTime.java | remove unnecessary comment |
|
Java | lgpl-2.1 | a68fde5d45dbbccb8ed4d527e1bf458e83ab630e | 0 | open-eid/digidoc4j,antonioaraujob/digidoc4j,keijokapp/digidoc4j,fazz/digidoc4j,open-eid/digidoc4j,fazz/digidoc4j,open-eid/digidoc4j,antonioaraujob/digidoc4j,keijokapp/digidoc4j | package org.digidoc4j.main;
import org.digidoc4j.DigiDoc4JTestHelper;
import org.digidoc4j.api.Container;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.Assertion;
import org.junit.contrib.java.lang.system.ExpectedSystemExit;
import org.junit.contrib.java.lang.system.StandardOutputStreamLog;
import java.nio.file.Files;
import java.nio.file.Paths;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class DigiDoc4JTest extends DigiDoc4JTestHelper {
@Rule
public final ExpectedSystemExit exit = ExpectedSystemExit.none();
@Rule
public final StandardOutputStreamLog sout = new StandardOutputStreamLog();
@After
public void cleanUp() throws Exception {
Files.deleteIfExists(Paths.get("test1.ddoc"));
}
@Test
public void createsContainerAndSignsIt() throws Exception {
exit.expectSystemExitWithStatus(0);
Files.deleteIfExists(Paths.get("test1.ddoc"));
String[] params = new String[]{"-in", "test1.ddoc", "-add", "testFiles/test.txt", "plain/text", "-pkcs12", "testFiles/signout.p12", "test"};
DigiDoc4J.main(params);
}
@Test
public void createsContainerAndAddsFileWithoutMimeType() throws Exception {
exit.expectSystemExitWithStatus(2);
Files.deleteIfExists(Paths.get("test1.ddoc"));
String[] params = new String[]{"-in", "test1.ddoc", "-add", "testFiles/test.txt", "-pkcs12", "testFiles/signout.p12", "test"};
DigiDoc4J.main(params);
}
@Test
public void commandLineInputCausesDigiDoc4JException() throws Exception {
exit.expectSystemExitWithStatus(1);
String[] params = new String[]{"-in", "NotFoundFile.ddoc", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void removeFileFromContainer() throws Exception {
exit.expectSystemExitWithStatus(0);
Container container = Container.create(Container.DocumentType.DDOC);
container.addDataFile("testFiles/test.txt", "text/plain");
Files.deleteIfExists(Paths.get("test1.ddoc"));
container.save("test1.ddoc");
String[] params = new String[]{"-in", "test1.ddoc", "-remove", "test.txt"};
DigiDoc4J.main(params);
}
@Test
public void verifyValidDDoc() throws Exception {
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws Exception {
assertEquals("Signature S0 is valid", sout.getLog().trim());
}
});
String[] params = new String[]{"-in", "testFiles/ddoc_for_testing.ddoc", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void verifyInValidDDoc() throws Exception {
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws Exception {
assertThat(sout.getLog(), containsString("Signature S0 is not valid"));
}
});
sout.clear();
String[] params = new String[]{"-in", "testFiles/changed_digidoc_test.ddoc", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void bDocContainerTypeNotYetSupported() throws Exception {
exit.expectSystemExitWithStatus(2);
String[] params = new String[]{"-in", "test1.bdoc", "-type", "BDOC"};
DigiDoc4J.main(params);
}
@Test
public void verifyDDocWithoutSignature() throws Exception {
exit.expectSystemExitWithStatus(1);
String[] params = new String[]{"-in", "testFiles/no_signed_doc_no_signature", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void verifyDDocWithEmptyContainer() throws Exception {
exit.expectSystemExitWithStatus(1);
String[] params = new String[]{"-in", "testFiles/empty_container_no_signature", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void showsUsage() throws Exception {
exit.expectSystemExitWithStatus(2);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws Exception {
assertThat(sout.getLog(), containsString("usage: digidoc4j"));
}
});
DigiDoc4J.main(new String[]{});
}
} | test/org/digidoc4j/main/DigiDoc4JTest.java | package org.digidoc4j.main;
import org.digidoc4j.DigiDoc4JTestHelper;
import org.digidoc4j.api.Container;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.Assertion;
import org.junit.contrib.java.lang.system.ExpectedSystemExit;
import org.junit.contrib.java.lang.system.StandardOutputStreamLog;
import java.nio.file.Files;
import java.nio.file.Paths;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class DigiDoc4JTest extends DigiDoc4JTestHelper {
@Rule
public final ExpectedSystemExit exit = ExpectedSystemExit.none();
@Rule
public final StandardOutputStreamLog sout = new StandardOutputStreamLog();
@Test
public void createsContainerAndSignsIt() throws Exception {
exit.expectSystemExitWithStatus(0);
Files.deleteIfExists(Paths.get("test1.ddoc"));
String[] params = new String[]{"-in", "test1.ddoc", "-add", "testFiles/test.txt", "plain/text", "-pkcs12", "testFiles/signout.p12", "test"};
DigiDoc4J.main(params);
}
@Test
public void createsContainerAndAddsFileWithoutMimeType() throws Exception {
exit.expectSystemExitWithStatus(2);
Files.deleteIfExists(Paths.get("test1.ddoc"));
String[] params = new String[]{"-in", "test1.ddoc", "-add", "testFiles/test.txt", "-pkcs12", "testFiles/signout.p12", "test"};
DigiDoc4J.main(params);
}
@Test
public void commandLineInputCausesDigiDoc4JException() throws Exception {
exit.expectSystemExitWithStatus(1);
String[] params = new String[]{"-in", "NotFoundFile.ddoc", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void removeFileFromContainer() throws Exception {
exit.expectSystemExitWithStatus(0);
Container container = Container.create(Container.DocumentType.DDOC);
container.addDataFile("testFiles/test.txt", "text/plain");
Files.deleteIfExists(Paths.get("test1.ddoc"));
container.save("test1.ddoc");
String[] params = new String[]{"-in", "test1.ddoc", "-remove", "test.txt"};
DigiDoc4J.main(params);
}
@Test
public void verifyValidDDoc() throws Exception {
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws Exception {
assertEquals("Signature S0 is valid", sout.getLog().trim());
}
});
String[] params = new String[]{"-in", "testFiles/ddoc_for_testing.ddoc", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void verifyInValidDDoc() throws Exception {
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws Exception {
assertThat(sout.getLog(), containsString("Signature S0 is not valid"));
}
});
sout.clear();
String[] params = new String[]{"-in", "testFiles/changed_digidoc_test.ddoc", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void bDocContainerTypeNotYetSupported() throws Exception {
exit.expectSystemExitWithStatus(2);
String[] params = new String[]{"-in", "test1.bdoc", "-type", "BDOC"};
DigiDoc4J.main(params);
}
@Test
public void verifyDDocWithoutSignature() throws Exception {
exit.expectSystemExitWithStatus(1);
String[] params = new String[]{"-in", "testFiles/no_signed_doc_no_signature", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void verifyDDocWithEmptyContainer() throws Exception {
exit.expectSystemExitWithStatus(1);
String[] params = new String[]{"-in", "testFiles/empty_container_no_signature", "-verify"};
DigiDoc4J.main(params);
}
@Test
public void showsUsage() throws Exception {
exit.expectSystemExitWithStatus(2);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws Exception {
assertThat(sout.getLog(), containsString("usage: digidoc4j"));
}
});
DigiDoc4J.main(new String[]{});
}
} | Updated tests
| test/org/digidoc4j/main/DigiDoc4JTest.java | Updated tests |
|
Java | apache-2.0 | c9252b0777366cb1912539f20d165bb1a2356e78 | 0 | tateshitah/jspwiki,tateshitah/jspwiki,tateshitah/jspwiki,tateshitah/jspwiki | /*
JSPWiki - a JSP-based WikiWiki clone.
Copyright (C) 2001-2004 Janne Jalkanen ([email protected]),
Erik Bunn ([email protected])
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.ecyrd.jspwiki;
import java.io.*;
import java.util.*;
import org.apache.commons.lang.time.StopWatch;
import org.apache.log4j.Logger;
import com.ecyrd.jspwiki.attachment.Attachment;
import com.ecyrd.jspwiki.filters.BasicPageFilter;
import com.ecyrd.jspwiki.providers.ProviderException;
import com.ecyrd.jspwiki.providers.WikiPageProvider;
/*
BUGS
- if a wikilink is added to a page, then removed, RefMan still thinks that
the page refers to the wikilink page. Hm.
- if a page is deleted, gets very confused.
- Serialization causes page attributes to be missing, when InitializablePlugins
are not executed properly. Thus, serialization should really also mark whether
a page is serializable or not...
*/
/*
A word about synchronizing:
I expect this object to be accessed in three situations:
- when a WikiEngine is created and it scans its wikipages
- when the WE saves a page
- when a JSP page accesses one of the WE's ReferenceManagers
to display a list of (un)referenced pages.
So, access to this class is fairly rare, and usually triggered by
user interaction. OTOH, the methods in this class use their storage
objects intensively (and, sorry to say, in an unoptimized manner =).
My deduction: using unsynchronized HashMaps etc and syncing methods
or code blocks is preferrable to using slow, synced storage objects.
We don't have iterative code here, so I'm going to use synced methods
for now.
Please contact me if you notice problems with ReferenceManager, and
especially with synchronization, or if you have suggestions about
syncing.
[email protected]
*/
/**
* Keeps track of wikipage references:
* <UL>
* <LI>What pages a given page refers to
* <LI>What pages refer to a given page
* </UL>
*
* This is a quick'n'dirty approach without any finesse in storage and
* searching algorithms; we trust java.util.*.
* <P>
* This class contains two HashMaps, m_refersTo and m_referredBy. The
* first is indexed by WikiPage names and contains a Collection of all
* WikiPages the page refers to. (Multiple references are not counted,
* naturally.) The second is indexed by WikiPage names and contains
* a Set of all pages that refer to the indexing page. (Notice -
* the keys of both Maps should be kept in sync.)
* <P>
* When a page is added or edited, its references are parsed, a Collection
* is received, and we crudely replace anything previous with this new
* Collection. We then check each referenced page name and make sure they
* know they are referred to by the new page.
* <P>
* Based on this information, we can perform non-optimal searches for
* e.g. unreferenced pages, top ten lists, etc.
* <P>
* The owning class must take responsibility of filling in any pre-existing
* information, probably by loading each and every WikiPage and calling this
* class to update the references when created.
*
* @author [email protected]
* @since 1.6.1
*/
public class ReferenceManager
extends BasicPageFilter
{
/** Maps page wikiname to a Collection of pages it refers to. The Collection
* must contain Strings. The Collection may contain names of non-existing
* pages.
*/
private Map m_refersTo;
private Map m_unmutableRefersTo;
/** Maps page wikiname to a Set of referring pages. The Set must
* contain Strings. Non-existing pages (a reference exists, but not a file
* for the page contents) may have an empty Set in m_referredBy.
*/
private Map m_referredBy;
private Map m_unmutableReferredBy;
/** The WikiEngine that owns this object. */
private WikiEngine m_engine;
private boolean m_matchEnglishPlurals = false;
private static Logger log = Logger.getLogger(ReferenceManager.class);
private static final String SERIALIZATION_FILE = "refmgr.ser";
/** We use this also a generic serialization id */
private static final long serialVersionUID = 1L;
/**
* Builds a new ReferenceManager.
*
* @param engine The WikiEngine to which this is managing references to.
*/
public ReferenceManager( WikiEngine engine )
{
m_refersTo = new HashMap();
m_referredBy = new HashMap();
m_engine = engine;
m_matchEnglishPlurals = TextUtil.getBooleanProperty( engine.getWikiProperties(),
WikiEngine.PROP_MATCHPLURALS,
m_matchEnglishPlurals );
//
// Create two maps that contain unmutable versions of the two basic maps.
//
m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo );
}
/**
* Does a full reference update.
*/
private void updatePageReferences( WikiPage page )
throws ProviderException
{
String content = m_engine.getPageManager().getPageText( page.getName(),
WikiPageProvider.LATEST_VERSION );
TreeSet res = new TreeSet();
Collection links = m_engine.scanWikiLinks( page, content );
res.addAll( links );
Collection attachments = m_engine.getAttachmentManager().listAttachments( page );
for( Iterator atti = attachments.iterator(); atti.hasNext(); )
{
res.add( ((Attachment)(atti.next())).getName() );
}
updateReferences( page.getName(), res );
}
/**
* Initializes the entire reference manager with the initial set of pages
* from the collection.
*
* @param pages A collection of all pages you want to be included in the reference
* count.
* @since 2.2
*/
public void initialize( Collection pages )
throws ProviderException
{
log.debug( "Initializing new ReferenceManager with "+pages.size()+" initial pages." );
StopWatch sw = new StopWatch();
sw.start();
log.info( "Starting cross reference scan of WikiPages" );
//
// First, try to serialize old data from disk. If that fails,
// we'll go and update the entire reference lists (which'll take
// time)
//
try
{
long saved = unserializeFromDisk();
//
// Now we must check if any of the pages have been changed
// while we were in the electronic la-la-land, and update
// the references for them.
//
Iterator it = pages.iterator();
while( it.hasNext() )
{
WikiPage page = (WikiPage) it.next();
if( page instanceof Attachment )
{
// Skip attachments
}
else
{
// Refresh with the latest copy
page = m_engine.getPage( page.getName() );
if( page.getLastModified() == null )
{
log.fatal( "Provider returns null lastModified. Please submit a bug report." );
}
else if( page.getLastModified().getTime() > saved )
{
updatePageReferences( page );
}
}
}
}
catch( Exception e )
{
log.info("Unable to unserialize old refmgr information, rebuilding database: "+e.getMessage());
buildKeyLists( pages );
// Scan the existing pages from disk and update references in the manager.
Iterator it = pages.iterator();
while( it.hasNext() )
{
WikiPage page = (WikiPage)it.next();
if( page instanceof Attachment )
{
// We cannot build a reference list from the contents
// of attachments, so we skip them.
}
else
{
updatePageReferences( page );
}
}
serializeToDisk();
}
sw.stop();
log.info( "Cross reference scan done in "+sw );
}
/**
* Reads the serialized data from the disk back to memory.
* Returns the date when the data was last written on disk
*/
private synchronized long unserializeFromDisk()
throws IOException,
ClassNotFoundException
{
ObjectInputStream in = null;
long saved = 0L;
try
{
StopWatch sw = new StopWatch();
sw.start();
File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
in = new ObjectInputStream( new BufferedInputStream(new FileInputStream(f)) );
long ver = in.readLong();
if( ver != serialVersionUID )
{
throw new IOException("File format has changed; I need to recalculate references.");
}
saved = in.readLong();
m_refersTo = (Map) in.readObject();
m_referredBy = (Map) in.readObject();
in.close();
m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo );
sw.stop();
log.debug("Read serialized data successfully in "+sw);
}
finally
{
try {
if( in != null ) in.close();
} catch( IOException ex ) {}
}
return saved;
}
/**
* Serializes hashmaps to disk. The format is private, don't touch it.
*/
private synchronized void serializeToDisk()
{
ObjectOutputStream out = null;
try
{
StopWatch sw = new StopWatch();
sw.start();
File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
out = new ObjectOutputStream( new BufferedOutputStream(new FileOutputStream(f)) );
out.writeLong( serialVersionUID );
out.writeLong( System.currentTimeMillis() ); // Timestamp
out.writeObject( m_refersTo );
out.writeObject( m_referredBy );
out.close();
sw.stop();
log.debug("serialization done - took "+sw);
}
catch( IOException e )
{
log.error("Unable to serialize!");
try {
if( out != null ) out.close();
} catch( IOException ex ) {}
}
}
/**
* After the page has been saved, updates the reference lists.
*/
public void postSave( WikiContext context, String content )
{
WikiPage page = context.getPage();
updateReferences( page.getName(),
context.getEngine().scanWikiLinks( page, content ) );
serializeToDisk();
}
/**
* Updates the m_referedTo and m_referredBy hashmaps when a page has been
* deleted.
* <P>
* Within the m_refersTo map the pagename is a key. The whole key-value-set
* has to be removed to keep the map clean.
* Within the m_referredBy map the name is stored as a value. Since a key
* can have more than one value we have to delete just the key-value-pair
* referring page:deleted page.
*
* @param page Name of the page to remove from the maps.
*/
public synchronized void pageRemoved( WikiPage page )
{
String pageName = page.getName();
Collection RefTo = (Collection)m_refersTo.get( pageName );
Iterator it_refTo = RefTo.iterator();
while( it_refTo.hasNext() )
{
String referredPageName = (String)it_refTo.next();
Set refBy = (Set)m_referredBy.get( referredPageName );
log.debug("Before cleaning m_referredBy HashMap key:value "+referredPageName+":"+m_referredBy.get( referredPageName ));
refBy.remove(pageName);
m_referredBy.remove( referredPageName );
m_referredBy.put( referredPageName, refBy );
log.debug("After cleaning m_referredBy HashMap key:value "+referredPageName+":"+m_referredBy.get( referredPageName ));
}
log.debug("Removing from m_refersTo HashMap key:value "+pageName+":"+m_refersTo.get( pageName ));
m_refersTo.remove( pageName );
}
/**
* Updates the referred pages of a new or edited WikiPage. If a refersTo
* entry for this page already exists, it is removed and a new one is built
* from scratch. Also calls updateReferredBy() for each referenced page.
* <P>
* This is the method to call when a new page has been created and we
* want to a) set up its references and b) notify the referred pages
* of the references. Use this method during run-time.
*
* @param page Name of the page to update.
* @param references A Collection of Strings, each one pointing to a page this page references.
*/
public synchronized void updateReferences( String page, Collection references )
{
//
// Create a new entry in m_refersTo.
//
Collection oldRefTo = (Collection)m_refersTo.get( page );
m_refersTo.remove( page );
m_refersTo.put( page, references );
//
// We know the page exists, since it's making references somewhere.
// If an entry for it didn't exist previously in m_referredBy, make
// sure one is added now.
//
if( !m_referredBy.containsKey( page ) )
{
m_referredBy.put( page, new TreeSet() );
}
//
// Get all pages that used to be referred to by 'page' and
// remove that reference. (We don't want to try to figure out
// which particular references were removed...)
//
cleanReferredBy( page, oldRefTo, references );
//
// Notify all referred pages of their referinesshoodicity.
//
Iterator it = references.iterator();
while( it.hasNext() )
{
String referredPageName = (String)it.next();
updateReferredBy( referredPageName, page );
}
}
/**
* Returns the refers-to list. For debugging.
*/
protected Map getRefersTo()
{
return( m_refersTo );
}
/**
* Returns the referred-by list. For debugging.
*/
protected Map getReferredBy()
{
return( m_referredBy );
}
/**
* Cleans the 'referred by' list, removing references by 'referrer' to
* any other page. Called after 'referrer' is removed.
*/
private void cleanReferredBy( String referrer,
Collection oldReferred,
Collection newReferred )
{
// Two ways to go about this. One is to look up all pages previously
// referred by referrer and remove referrer from their lists, and let
// the update put them back in (except possibly removed ones).
// The other is to get the old referred to list, compare to the new,
// and tell the ones missing in the latter to remove referrer from
// their list. Hm. We'll just try the first for now. Need to come
// back and optimize this a bit.
if( oldReferred == null )
return;
Iterator it = oldReferred.iterator();
while( it.hasNext() )
{
String referredPage = (String)it.next();
Set oldRefBy = (Set)m_referredBy.get( referredPage );
if( oldRefBy != null )
{
oldRefBy.remove( referrer );
}
// If the page is referred to by no one AND it doesn't even
// exist, we might just as well forget about this entry.
// It will be added again elsewhere if new references appear.
if( ( ( oldRefBy == null ) || ( oldRefBy.isEmpty() ) ) &&
( m_engine.pageExists( referredPage ) == false ) )
{
m_referredBy.remove( referredPage );
}
}
}
/**
* When initially building a ReferenceManager from scratch, call this method
* BEFORE calling updateReferences() with a full list of existing page names.
* It builds the refersTo and referredBy key lists, thus enabling
* updateReferences() to function correctly.
* <P>
* This method should NEVER be called after initialization. It clears all mappings
* from the reference tables.
*
* @param pages a Collection containing WikiPage objects.
*/
private synchronized void buildKeyLists( Collection pages )
{
m_refersTo.clear();
m_referredBy.clear();
if( pages == null )
return;
Iterator it = pages.iterator();
try
{
while( it.hasNext() )
{
WikiPage page = (WikiPage)it.next();
// We add a non-null entry to referredBy to indicate the referred page exists
m_referredBy.put( page.getName(), new TreeSet() );
// Just add a key to refersTo; the keys need to be in sync with referredBy.
m_refersTo.put( page.getName(), null );
}
}
catch( ClassCastException e )
{
log.fatal( "Invalid collection entry in ReferenceManager.buildKeyLists().", e );
}
}
/**
* Marks the page as referred to by the referrer. If the page does not
* exist previously, nothing is done. (This means that some page, somewhere,
* has a link to a page that does not exist.)
* <P>
* This method is NOT synchronized. It should only be referred to from
* within a synchronized method, or it should be made synced if necessary.
*/
private void updateReferredBy( String page, String referrer )
{
// We're not really interested in first level self-references.
if( page.equals( referrer ) )
{
return;
}
// Neither are we interested if plural forms refer to each other.
if( m_matchEnglishPlurals )
{
String p2 = page.endsWith("s") ? page.substring(0,page.length()-1) : page+"s";
if( referrer.equals(p2) )
{
return;
}
}
Set referrers = (Set)m_referredBy.get( page );
// Even if 'page' has not been created yet, it can still be referenced.
// This requires we don't use m_referredBy keys when looking up missing
// pages, of course.
if(referrers == null)
{
referrers = new TreeSet();
m_referredBy.put( page, referrers );
}
referrers.add( referrer );
}
/**
* Clears the references to a certain page so it's no longer in the map.
*
* @param pagename Name of the page to clear references for.
*/
public synchronized void clearPageEntries( String pagename )
{
m_referredBy.remove(pagename);
}
/**
* Finds all unreferenced pages. This requires a linear scan through
* m_referredBy to locate keys with null or empty values.
*/
public synchronized Collection findUnreferenced()
{
ArrayList unref = new ArrayList();
Set keys = m_referredBy.keySet();
Iterator it = keys.iterator();
while( it.hasNext() )
{
String key = (String) it.next();
//Set refs = (Set) m_referredBy.get( key );
Set refs = getReferenceList( m_referredBy, key );
if( refs == null || refs.isEmpty() )
{
unref.add( key );
}
}
return unref;
}
/**
* Finds all references to non-existant pages. This requires a linear
* scan through m_refersTo values; each value must have a corresponding
* key entry in the reference Maps, otherwise such a page has never
* been created.
* <P>
* Returns a Collection containing Strings of unreferenced page names.
* Each non-existant page name is shown only once - we don't return information
* on who referred to it.
*/
public synchronized Collection findUncreated()
{
TreeSet uncreated = new TreeSet();
// Go through m_refersTo values and check that m_refersTo has the corresponding keys.
// We want to reread the code to make sure our HashMaps are in sync...
Collection allReferences = m_refersTo.values();
Iterator it = allReferences.iterator();
while( it.hasNext() )
{
Collection refs = (Collection)it.next();
if( refs != null )
{
Iterator rit = refs.iterator();
while( rit.hasNext() )
{
String aReference = (String)rit.next();
if( m_engine.pageExists( aReference ) == false )
{
uncreated.add( aReference );
}
}
}
}
return uncreated;
}
/**
* Searches for the given page in the given Map.
*/
private Set getReferenceList( Map coll, String pagename )
{
Set refs = (Set)coll.get( pagename );
if( m_matchEnglishPlurals )
{
//
// We'll add also matches from the "other" page.
//
Set refs2;
if( pagename.endsWith("s") )
{
refs2 = (Set)coll.get( pagename.substring(0,pagename.length()-1) );
}
else
{
refs2 = (Set)coll.get( pagename+"s" );
}
if( refs2 != null )
{
if( refs != null )
refs.addAll( refs2 );
else
refs = refs2;
}
}
return refs;
}
/**
* Find all pages that refer to this page. Returns null if the page
* does not exist or is not referenced at all, otherwise returns a
* collection containing page names (String) that refer to this one.
* <p>
* @param pagename The page to find referrers for.
* @return A Collection of Strings. (This is, in fact, a Set, and is likely
* to change at some point to a Set). May return null, if the page
* does not exist, or if it has no references.
*/
// FIXME: Return a Set instead of a Collection.
public synchronized Collection findReferrers( String pagename )
{
Set refs = getReferenceList( m_referredBy, pagename );
if( refs == null || refs.isEmpty() )
{
return null;
}
return refs;
}
/**
* Returns all pages that refer to this page. Note that this method
* returns an unmodifiable Map, which may be abruptly changed. So any
* access to any iterator may result in a ConcurrentModificationException.
* <p>
* The advantages of using this method over findReferrers() is that
* it is very fast, as it does not create a new object. The disadvantages
* are that it does not do any mapping between plural names, and you
* may end up getting a ConcurrentModificationException.
*
* @param pageName Page name to query.
* @return A Set of Strings containing the names of all the pages that refer
* to this page. May return null, if the page does not exist or
* has not been indexed yet.
* @since 2.2.33
*/
public Set findReferredBy( String pageName )
{
return (Set)m_unmutableReferredBy.get( pageName );
}
/**
* Returns all pages that this page refers to. You can use this as a quick
* way of getting the links from a page, but note that it does not link any
* InterWiki, image, or external links. It does contain attachments, though.
* <p>
* The Collection returned is unmutable, so you cannot change it. It does reflect
* the current status and thus is a live object. So, if you are using any
* kind of an iterator on it, be prepared for ConcurrentModificationExceptions.
* <p>
* The returned value is a Collection, because a page may refer to another page
* multiple times.
*
* @param pageName Page name to query
* @return A Collection of Strings containing the names of the pages that this page
* refers to. May return null, if the page does not exist or has not
* been indexed yet.
* @since 2.2.33
*/
public Collection findRefersTo( String pageName )
{
return (Collection)m_unmutableRefersTo.get( pageName );
}
}
| src/com/ecyrd/jspwiki/ReferenceManager.java | /*
JSPWiki - a JSP-based WikiWiki clone.
Copyright (C) 2001-2004 Janne Jalkanen ([email protected]),
Erik Bunn ([email protected])
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.ecyrd.jspwiki;
import java.util.*;
import java.io.*;
import org.apache.log4j.*;
import com.ecyrd.jspwiki.filters.BasicPageFilter;
import com.ecyrd.jspwiki.attachment.*;
import com.ecyrd.jspwiki.providers.*;
/*
BUGS
- if a wikilink is added to a page, then removed, RefMan still thinks that
the page refers to the wikilink page. Hm.
- if a page is deleted, gets very confused.
- Serialization causes page attributes to be missing, when InitializablePlugins
are not executed properly. Thus, serialization should really also mark whether
a page is serializable or not...
*/
/*
A word about synchronizing:
I expect this object to be accessed in three situations:
- when a WikiEngine is created and it scans its wikipages
- when the WE saves a page
- when a JSP page accesses one of the WE's ReferenceManagers
to display a list of (un)referenced pages.
So, access to this class is fairly rare, and usually triggered by
user interaction. OTOH, the methods in this class use their storage
objects intensively (and, sorry to say, in an unoptimized manner =).
My deduction: using unsynchronized HashMaps etc and syncing methods
or code blocks is preferrable to using slow, synced storage objects.
We don't have iterative code here, so I'm going to use synced methods
for now.
Please contact me if you notice problems with ReferenceManager, and
especially with synchronization, or if you have suggestions about
syncing.
[email protected]
*/
/**
* Keeps track of wikipage references:
* <UL>
* <LI>What pages a given page refers to
* <LI>What pages refer to a given page
* </UL>
*
* This is a quick'n'dirty approach without any finesse in storage and
* searching algorithms; we trust java.util.*.
* <P>
* This class contains two HashMaps, m_refersTo and m_referredBy. The
* first is indexed by WikiPage names and contains a Collection of all
* WikiPages the page refers to. (Multiple references are not counted,
* naturally.) The second is indexed by WikiPage names and contains
* a Set of all pages that refer to the indexing page. (Notice -
* the keys of both Maps should be kept in sync.)
* <P>
* When a page is added or edited, its references are parsed, a Collection
* is received, and we crudely replace anything previous with this new
* Collection. We then check each referenced page name and make sure they
* know they are referred to by the new page.
* <P>
* Based on this information, we can perform non-optimal searches for
* e.g. unreferenced pages, top ten lists, etc.
* <P>
* The owning class must take responsibility of filling in any pre-existing
* information, probably by loading each and every WikiPage and calling this
* class to update the references when created.
*
* @author [email protected]
* @since 1.6.1
*/
public class ReferenceManager
extends BasicPageFilter
{
/** Maps page wikiname to a Collection of pages it refers to. The Collection
* must contain Strings. The Collection may contain names of non-existing
* pages.
*/
private Map m_refersTo;
private Map m_unmutableRefersTo;
/** Maps page wikiname to a Set of referring pages. The Set must
* contain Strings. Non-existing pages (a reference exists, but not a file
* for the page contents) may have an empty Set in m_referredBy.
*/
private Map m_referredBy;
private Map m_unmutableReferredBy;
/** The WikiEngine that owns this object. */
private WikiEngine m_engine;
private boolean m_matchEnglishPlurals = false;
private static Logger log = Logger.getLogger(ReferenceManager.class);
private static final String SERIALIZATION_FILE = "refmgr.ser";
/** We use this also a generic serialization id */
private static final long serialVersionUID = 1L;
/**
* Builds a new ReferenceManager.
*
* @param engine The WikiEngine to which this is managing references to.
*/
public ReferenceManager( WikiEngine engine )
{
m_refersTo = new HashMap();
m_referredBy = new HashMap();
m_engine = engine;
m_matchEnglishPlurals = TextUtil.getBooleanProperty( engine.getWikiProperties(),
WikiEngine.PROP_MATCHPLURALS,
m_matchEnglishPlurals );
//
// Create two maps that contain unmutable versions of the two basic maps.
//
m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
m_unmutableRefersTo = Collections.unmodifiableMap( m_refersTo );
}
/**
* Does a full reference update.
*/
private void updatePageReferences( WikiPage page )
throws ProviderException
{
String content = m_engine.getPageManager().getPageText( page.getName(),
WikiPageProvider.LATEST_VERSION );
TreeSet res = new TreeSet();
Collection links = m_engine.scanWikiLinks( page, content );
res.addAll( links );
Collection attachments = m_engine.getAttachmentManager().listAttachments( page );
for( Iterator atti = attachments.iterator(); atti.hasNext(); )
{
res.add( ((Attachment)(atti.next())).getName() );
}
updateReferences( page.getName(), res );
}
    /**
     *  Initializes the entire reference manager with the initial set of pages
     *  from the collection.  Tries to restore a previously serialized state
     *  from disk first and only re-scans pages modified after that snapshot;
     *  if restoring fails for any reason, rebuilds everything from scratch
     *  and serializes the fresh state.
     *
     *  @param pages A collection of all pages you want to be included in the reference
     *               count.
     *  @throws ProviderException if fetching a page's text fails during the scan.
     *  @since 2.2
     */
    public void initialize( Collection pages )
        throws ProviderException
    {
        log.debug( "Initializing new ReferenceManager with "+pages.size()+" initial pages." );
        long start = System.currentTimeMillis();
        log.info( "Starting cross reference scan of WikiPages" );
        //
        //  First, try to serialize old data from disk.  If that fails,
        //  we'll go and update the entire reference lists (which'll take
        //  time)
        //
        try
        {
            // 'saved' is the timestamp (ms) of the on-disk snapshot.
            long saved = unserializeFromDisk();
            //
            //  Now we must check if any of the pages have been changed
            //  while we were in the electronic la-la-land, and update
            //  the references for them.
            //
            Iterator it = pages.iterator();
            while( it.hasNext() )
            {
                WikiPage page = (WikiPage) it.next();
                if( page instanceof Attachment )
                {
                    // Skip attachments
                }
                else
                {
                    // Refresh with the latest copy
                    page = m_engine.getPage( page.getName() );
                    if( page.getLastModified() == null )
                    {
                        log.fatal( "Provider returns null lastModified. Please submit a bug report." );
                    }
                    else if( page.getLastModified().getTime() > saved )
                    {
                        // Page changed after the snapshot was taken - rescan it.
                        updatePageReferences( page );
                    }
                }
            }
        }
        catch( Exception e )
        {
            // Any failure (missing file, format change, ...) falls back to a full rebuild.
            log.info("Unable to unserialize old refmgr information, rebuilding database: "+e.getMessage());
            buildKeyLists( pages );
            // Scan the existing pages from disk and update references in the manager.
            Iterator it = pages.iterator();
            while( it.hasNext() )
            {
                WikiPage page = (WikiPage)it.next();
                if( page instanceof Attachment )
                {
                    // We cannot build a reference list from the contents
                    // of attachments, so we skip them.
                }
                else
                {
                    updatePageReferences( page );
                }
            }
            serializeToDisk();
        }
        log.info( "Cross reference scan done (" +
                  (System.currentTimeMillis()-start) +
                  " ms)" );
    }
/**
* Reads the serialized data from the disk back to memory.
* Returns the date when the data was last written on disk
*/
private synchronized long unserializeFromDisk()
throws IOException,
ClassNotFoundException
{
ObjectInputStream in = null;
long saved = 0L;
try
{
long start = System.currentTimeMillis();
File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
in = new ObjectInputStream( new BufferedInputStream(new FileInputStream(f)) );
long ver = in.readLong();
if( ver != serialVersionUID )
{
throw new IOException("File format has changed; I need to recalculate references.");
}
saved = in.readLong();
m_refersTo = (Map) in.readObject();
m_referredBy = (Map) in.readObject();
in.close();
long finish = System.currentTimeMillis();
log.debug("Read serialized data successfully in "+(finish-start)+"ms");
}
finally
{
try {
if( in != null ) in.close();
} catch( IOException ex ) {}
}
return saved;
}
/**
* Serializes hashmaps to disk. The format is private, don't touch it.
*/
private synchronized void serializeToDisk()
{
ObjectOutputStream out = null;
try
{
long start = System.currentTimeMillis();
File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );
out = new ObjectOutputStream( new BufferedOutputStream(new FileOutputStream(f)) );
out.writeLong( serialVersionUID );
out.writeLong( System.currentTimeMillis() ); // Timestamp
out.writeObject( m_refersTo );
out.writeObject( m_referredBy );
out.close();
long finish = System.currentTimeMillis();
log.debug("serialization done - took "+(finish-start)+"ms");
}
catch( IOException e )
{
log.error("Unable to serialize!");
try {
if( out != null ) out.close();
} catch( IOException ex ) {}
}
}
/**
* After the page has been saved, updates the reference lists.
*/
public void postSave( WikiContext context, String content )
{
WikiPage page = context.getPage();
updateReferences( page.getName(),
context.getEngine().scanWikiLinks( page, content ) );
serializeToDisk();
}
/**
* Updates the m_referedTo and m_referredBy hashmaps when a page has been
* deleted.
* <P>
* Within the m_refersTo map the pagename is a key. The whole key-value-set
* has to be removed to keep the map clean.
* Within the m_referredBy map the name is stored as a value. Since a key
* can have more than one value we have to delete just the key-value-pair
* referring page:deleted page.
*
* @param page Name of the page to remove from the maps.
*/
public synchronized void pageRemoved( WikiPage page )
{
String pageName = page.getName();
Collection RefTo = (Collection)m_refersTo.get( pageName );
Iterator it_refTo = RefTo.iterator();
while( it_refTo.hasNext() )
{
String referredPageName = (String)it_refTo.next();
Set refBy = (Set)m_referredBy.get( referredPageName );
log.debug("Before cleaning m_referredBy HashMap key:value "+referredPageName+":"+m_referredBy.get( referredPageName ));
refBy.remove(pageName);
m_referredBy.remove( referredPageName );
m_referredBy.put( referredPageName, refBy );
log.debug("After cleaning m_referredBy HashMap key:value "+referredPageName+":"+m_referredBy.get( referredPageName ));
}
log.debug("Removing from m_refersTo HashMap key:value "+pageName+":"+m_refersTo.get( pageName ));
m_refersTo.remove( pageName );
}
/**
* Updates the referred pages of a new or edited WikiPage. If a refersTo
* entry for this page already exists, it is removed and a new one is built
* from scratch. Also calls updateReferredBy() for each referenced page.
* <P>
* This is the method to call when a new page has been created and we
* want to a) set up its references and b) notify the referred pages
* of the references. Use this method during run-time.
*
* @param page Name of the page to update.
* @param references A Collection of Strings, each one pointing to a page this page references.
*/
public synchronized void updateReferences( String page, Collection references )
{
//
// Create a new entry in m_refersTo.
//
Collection oldRefTo = (Collection)m_refersTo.get( page );
m_refersTo.remove( page );
m_refersTo.put( page, references );
//
// We know the page exists, since it's making references somewhere.
// If an entry for it didn't exist previously in m_referredBy, make
// sure one is added now.
//
if( !m_referredBy.containsKey( page ) )
{
m_referredBy.put( page, new TreeSet() );
}
//
// Get all pages that used to be referred to by 'page' and
// remove that reference. (We don't want to try to figure out
// which particular references were removed...)
//
cleanReferredBy( page, oldRefTo, references );
//
// Notify all referred pages of their referinesshoodicity.
//
Iterator it = references.iterator();
while( it.hasNext() )
{
String referredPageName = (String)it.next();
updateReferredBy( referredPageName, page );
}
}
/**
* Returns the refers-to list. For debugging.
*/
protected Map getRefersTo()
{
return( m_refersTo );
}
/**
* Returns the referred-by list. For debugging.
*/
protected Map getReferredBy()
{
return( m_referredBy );
}
    /**
     *  Cleans the 'referred by' list, removing references by 'referrer' to
     *  any other page.  Called after 'referrer' is removed.
     *
     *  @param referrer    the page whose old outgoing references are being retracted
     *  @param oldReferred the pages 'referrer' used to point at; may be null
     *  @param newReferred the new reference list; currently UNUSED by this
     *                     implementation (see the strategy comment below)
     */
    private void cleanReferredBy( String referrer,
                                  Collection oldReferred,
                                  Collection newReferred )
    {
        // Two ways to go about this. One is to look up all pages previously
        // referred by referrer and remove referrer from their lists, and let
        // the update put them back in (except possibly removed ones).
        // The other is to get the old referred to list, compare to the new,
        // and tell the ones missing in the latter to remove referrer from
        // their list. Hm. We'll just try the first for now. Need to come
        // back and optimize this a bit.
        if( oldReferred == null )
            return;
        Iterator it = oldReferred.iterator();
        while( it.hasNext() )
        {
            String referredPage = (String)it.next();
            // Drop 'referrer' from the page's referrer set (if any).
            Set oldRefBy = (Set)m_referredBy.get( referredPage );
            if( oldRefBy != null )
            {
                oldRefBy.remove( referrer );
            }
            // If the page is referred to by no one AND it doesn't even
            // exist, we might just as well forget about this entry.
            // It will be added again elsewhere if new references appear.
            if( ( ( oldRefBy == null ) || ( oldRefBy.isEmpty() ) ) &&
                ( m_engine.pageExists( referredPage ) == false ) )
            {
                m_referredBy.remove( referredPage );
            }
        }
    }
/**
* When initially building a ReferenceManager from scratch, call this method
* BEFORE calling updateReferences() with a full list of existing page names.
* It builds the refersTo and referredBy key lists, thus enabling
* updateReferences() to function correctly.
* <P>
* This method should NEVER be called after initialization. It clears all mappings
* from the reference tables.
*
* @param pages a Collection containing WikiPage objects.
*/
private synchronized void buildKeyLists( Collection pages )
{
m_refersTo.clear();
m_referredBy.clear();
if( pages == null )
return;
Iterator it = pages.iterator();
try
{
while( it.hasNext() )
{
WikiPage page = (WikiPage)it.next();
// We add a non-null entry to referredBy to indicate the referred page exists
m_referredBy.put( page.getName(), new TreeSet() );
// Just add a key to refersTo; the keys need to be in sync with referredBy.
m_refersTo.put( page.getName(), null );
}
}
catch( ClassCastException e )
{
log.fatal( "Invalid collection entry in ReferenceManager.buildKeyLists().", e );
}
}
/**
* Marks the page as referred to by the referrer. If the page does not
* exist previously, nothing is done. (This means that some page, somewhere,
* has a link to a page that does not exist.)
* <P>
* This method is NOT synchronized. It should only be referred to from
* within a synchronized method, or it should be made synced if necessary.
*/
private void updateReferredBy( String page, String referrer )
{
// We're not really interested in first level self-references.
if( page.equals( referrer ) )
{
return;
}
// Neither are we interested if plural forms refer to each other.
if( m_matchEnglishPlurals )
{
String p2 = page.endsWith("s") ? page.substring(0,page.length()-1) : page+"s";
if( referrer.equals(p2) )
{
return;
}
}
Set referrers = (Set)m_referredBy.get( page );
// Even if 'page' has not been created yet, it can still be referenced.
// This requires we don't use m_referredBy keys when looking up missing
// pages, of course.
if(referrers == null)
{
referrers = new TreeSet();
m_referredBy.put( page, referrers );
}
referrers.add( referrer );
}
    /**
     *  Clears the references to a certain page so it's no longer in the map.
     *
     *  NOTE(review): only the referred-by entry is dropped here; the page's
     *  m_refersTo entry is left untouched - presumably intentional, but worth
     *  confirming against the callers.
     *
     *  @param pagename  Name of the page to clear references for.
     */
    public synchronized void clearPageEntries( String pagename )
    {
        m_referredBy.remove(pagename);
    }
/**
* Finds all unreferenced pages. This requires a linear scan through
* m_referredBy to locate keys with null or empty values.
*/
public synchronized Collection findUnreferenced()
{
ArrayList unref = new ArrayList();
Set keys = m_referredBy.keySet();
Iterator it = keys.iterator();
while( it.hasNext() )
{
String key = (String) it.next();
//Set refs = (Set) m_referredBy.get( key );
Set refs = getReferenceList( m_referredBy, key );
if( refs == null || refs.isEmpty() )
{
unref.add( key );
}
}
return unref;
}
/**
* Finds all references to non-existant pages. This requires a linear
* scan through m_refersTo values; each value must have a corresponding
* key entry in the reference Maps, otherwise such a page has never
* been created.
* <P>
* Returns a Collection containing Strings of unreferenced page names.
* Each non-existant page name is shown only once - we don't return information
* on who referred to it.
*/
public synchronized Collection findUncreated()
{
TreeSet uncreated = new TreeSet();
// Go through m_refersTo values and check that m_refersTo has the corresponding keys.
// We want to reread the code to make sure our HashMaps are in sync...
Collection allReferences = m_refersTo.values();
Iterator it = allReferences.iterator();
while( it.hasNext() )
{
Collection refs = (Collection)it.next();
if( refs != null )
{
Iterator rit = refs.iterator();
while( rit.hasNext() )
{
String aReference = (String)rit.next();
if( m_engine.pageExists( aReference ) == false )
{
uncreated.add( aReference );
}
}
}
}
return uncreated;
}
/**
* Searches for the given page in the given Map.
*/
private Set getReferenceList( Map coll, String pagename )
{
Set refs = (Set)coll.get( pagename );
if( m_matchEnglishPlurals )
{
//
// We'll add also matches from the "other" page.
//
Set refs2;
if( pagename.endsWith("s") )
{
refs2 = (Set)coll.get( pagename.substring(0,pagename.length()-1) );
}
else
{
refs2 = (Set)coll.get( pagename+"s" );
}
if( refs2 != null )
{
if( refs != null )
refs.addAll( refs2 );
else
refs = refs2;
}
}
return refs;
}
/**
* Find all pages that refer to this page. Returns null if the page
* does not exist or is not referenced at all, otherwise returns a
* collection containing page names (String) that refer to this one.
* <p>
* @param pagename The page to find referrers for.
* @return A Collection of Strings. (This is, in fact, a Set, and is likely
* to change at some point to a Set). May return null, if the page
* does not exist, or if it has no references.
*/
// FIXME: Return a Set instead of a Collection.
public synchronized Collection findReferrers( String pagename )
{
Set refs = getReferenceList( m_referredBy, pagename );
if( refs == null || refs.isEmpty() )
{
return null;
}
return refs;
}
/**
* Returns all pages that refer to this page. Note that this method
* returns an unmodifiable Map, which may be abruptly changed. So any
* access to any iterator may result in a ConcurrentModificationException.
* <p>
* The advantages of using this method over findReferrers() is that
* it is very fast, as it does not create a new object. The disadvantages
* are that it does not do any mapping between plural names, and you
* may end up getting a ConcurrentModificationException.
*
* @param pageName Page name to query.
* @return A Set of Strings containing the names of all the pages that refer
* to this page. May return null, if the page does not exist or
* has not been indexed yet.
* @since 2.2.33
*/
public Set findReferredBy( String pageName )
{
return (Set)m_unmutableReferredBy.get( pageName );
}
/**
* Returns all pages that this page refers to. You can use this as a quick
* way of getting the links from a page, but note that it does not link any
* InterWiki, image, or external links. It does contain attachments, though.
* <p>
* The Collection returned is unmutable, so you cannot change it. It does reflect
* the current status and thus is a live object. So, if you are using any
* kind of an iterator on it, be prepared for ConcurrentModificationExceptions.
* <p>
* The returned value is a Collection, because a page may refer to another page
* multiple times.
*
* @param pageName Page name to query
* @return A Collection of Strings containing the names of the pages that this page
* refers to. May return null, if the page does not exist or has not
* been indexed yet.
* @since 2.2.33
*/
public Collection findRefersTo( String pageName )
{
return (Collection)m_unmutableRefersTo.get( pageName );
}
}
| Switched to StopWatch.
Oops, forgot to initialize unmutable maps after unserialization
git-svn-id: 6c0206e3b9edd104850923da33ebd73b435d374d@624538 13f79535-47bb-0310-9956-ffa450edef68
| src/com/ecyrd/jspwiki/ReferenceManager.java | Switched to StopWatch. Oops, forgot to initialize unmutable maps after unserialization |
|
Java | apache-2.0 | 3292354b51170ae56ae95ddd5ac267de506da307 | 0 | crate/crate,crate/crate,crate/crate | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.service;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateApplier;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateTaskConfig;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.NodeConnectionsService;
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import io.crate.common.unit.TimeValue;
public class ClusterApplierService extends AbstractLifecycleComponent implements ClusterApplier {
private static final Logger LOGGER = LogManager.getLogger(ClusterApplierService.class);
public static final Setting<TimeValue> CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING =
Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30),
Setting.Property.Dynamic, Setting.Property.NodeScope);
public static final String CLUSTER_UPDATE_THREAD_NAME = "clusterApplierService#updateTask";
private final ClusterSettings clusterSettings;
protected final ThreadPool threadPool;
private volatile TimeValue slowTaskLoggingThreshold;
private volatile PrioritizedEsThreadPoolExecutor threadPoolExecutor;
/**
* Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine
*/
private final Collection<ClusterStateApplier> highPriorityStateAppliers = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateApplier> normalPriorityStateAppliers = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateApplier> lowPriorityStateAppliers = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>();
private final Map<TimeoutClusterStateListener, NotifyTimeout> timeoutClusterStateListeners = new ConcurrentHashMap<>();
private final AtomicReference<ClusterState> state; // last applied state
private final String nodeName;
private NodeConnectionsService nodeConnectionsService;
public ClusterApplierService(String nodeName, Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) {
this.clusterSettings = clusterSettings;
this.threadPool = threadPool;
this.state = new AtomicReference<>();
this.nodeName = nodeName;
this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings);
this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
this::setSlowTaskLoggingThreshold);
}
    // Dynamic-setting hook: updates the threshold above which applier tasks are
    // considered slow (see CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING).
    private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) {
        this.slowTaskLoggingThreshold = slowTaskLoggingThreshold;
    }
    /**
     * Injects the {@link NodeConnectionsService}. May be set only once, and must be
     * set before the service is started (doStart() requires it to be non-null).
     */
    public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) {
        assert this.nodeConnectionsService == null : "nodeConnectionsService is already set";
        this.nodeConnectionsService = nodeConnectionsService;
    }
@Override
public void setInitialState(ClusterState initialState) {
if (lifecycle.started()) {
throw new IllegalStateException("can't set initial state when started");
}
assert state.get() == null : "state is already set";
state.set(initialState);
}
    @Override
    protected synchronized void doStart() {
        // Both the connection service and the initial state are hard prerequisites;
        // fail fast with a descriptive message if either was not injected.
        Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting");
        Objects.requireNonNull(state.get(), "please set initial state before starting");
        threadPoolExecutor = createThreadPoolExecutor();
    }
    // Single-threaded, priority-ordered executor: all cluster state application runs
    // on one dedicated "<node>/clusterApplierService#updateTask" daemon thread.
    protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() {
        return EsExecutors.newSinglePrioritizing(
            nodeName + "/" + CLUSTER_UPDATE_THREAD_NAME,
            daemonThreadFactory(nodeName, CLUSTER_UPDATE_THREAD_NAME),
            threadPool.scheduler());
    }
    /**
     * A prioritized runnable queued on the applier thread. As a
     * {@code Function<ClusterState, ClusterState>} it computes the next cluster
     * state from the current one by delegating to the wrapped update function.
     */
    class UpdateTask extends SourcePrioritizedRunnable implements Function<ClusterState, ClusterState> {
        final ClusterApplyListener listener;
        final Function<ClusterState, ClusterState> updateFunction;
        UpdateTask(Priority priority, String source, ClusterApplyListener listener,
                   Function<ClusterState, ClusterState> updateFunction) {
            super(priority, source);
            this.listener = listener;
            this.updateFunction = updateFunction;
        }
        @Override
        public ClusterState apply(ClusterState clusterState) {
            // Pure delegation; any transformation logic lives in the captured function.
            return updateFunction.apply(clusterState);
        }
        @Override
        public void run() {
            // runTask is defined elsewhere in this class (outside this view);
            // it drives the compute-and-apply cycle for this task.
            runTask(this);
        }
    }
@Override
protected synchronized void doStop() {
for (Map.Entry<TimeoutClusterStateListener, NotifyTimeout> onGoingTimeout : timeoutClusterStateListeners.entrySet()) {
try {
onGoingTimeout.getValue().cancel();
onGoingTimeout.getKey().onClose();
} catch (Exception ex) {
LOGGER.debug("failed to notify listeners on shutdown", ex);
}
}
ThreadPool.terminate(threadPoolExecutor, 10, TimeUnit.SECONDS);
}
    @Override
    protected synchronized void doClose() {
        // Nothing to release here beyond what doStop() already tears down.
    }
    /** Exposes the node's {@link ThreadPool} this service was constructed with. */
    public ThreadPool threadPool() {
        return threadPool;
    }
    /**
     * The current cluster state, i.e. the last state that has been fully applied.
     * Should be renamed to appliedClusterState
     */
    public ClusterState state() {
        // Appliers see the incoming state via their callback; reading the "applied"
        // state from within an applier would observe a not-yet-updated value.
        assert assertNotCalledFromClusterStateApplier("the applied cluster state is not yet available");
        ClusterState clusterState = this.state.get();
        assert clusterState != null : "initial cluster state not set yet";
        return clusterState;
    }
    /**
     * Adds a high priority applier of updated cluster states; these run before the
     * normal and low priority appliers.
     */
    public void addHighPriorityApplier(ClusterStateApplier applier) {
        highPriorityStateAppliers.add(applier);
    }
    /**
     * Adds an applier which will be called after all high priority and normal appliers
     * have been called.
     */
    public void addLowPriorityApplier(ClusterStateApplier applier) {
        lowPriorityStateAppliers.add(applier);
    }
    /**
     * Adds a normal priority applier of updated cluster states.
     */
    public void addStateApplier(ClusterStateApplier applier) {
        normalPriorityStateAppliers.add(applier);
    }
    /**
     * Removes an applier of updated cluster states, whichever priority tier it was
     * registered in (removal from the tiers it is absent from is a no-op).
     */
    public void removeApplier(ClusterStateApplier applier) {
        normalPriorityStateAppliers.remove(applier);
        highPriorityStateAppliers.remove(applier);
        lowPriorityStateAppliers.remove(applier);
    }
    /**
     * Add a listener for updated cluster states (notified after the appliers).
     */
    public void addListener(ClusterStateListener listener) {
        clusterStateListeners.add(listener);
    }
    /**
     * Removes a listener for updated cluster states.
     */
    public void removeListener(ClusterStateListener listener) {
        clusterStateListeners.remove(listener);
    }
    /**
     * Removes a timeout listener for updated cluster states, cancelling its pending
     * timeout notification if one was scheduled.
     */
    public void removeTimeoutListener(TimeoutClusterStateListener listener) {
        final NotifyTimeout timeout = timeoutClusterStateListeners.remove(listener);
        if (timeout != null) {
            timeout.cancel();
        }
    }
    /**
     * Add a listener for on/off local node master events. (A LocalNodeMasterListener
     * is itself a ClusterStateListener, so it is registered like any other.)
     */
    public void addLocalNodeMasterListener(LocalNodeMasterListener listener) {
        addListener(listener);
    }
    /**
     * Adds a cluster state listener that is expected to be removed during a short period of time.
     * If provided, the listener will be notified once a specific time has elapsed.
     *
     * NOTE: the listener is not removed on timeout. This is the responsibility of the caller.
     */
    public void addTimeoutListener(@Nullable final TimeValue timeout, final TimeoutClusterStateListener listener) {
        if (lifecycle.stoppedOrClosed()) {
            listener.onClose();
            return;
        }
        // call the post added notification on the same event thread
        try {
            threadPoolExecutor.execute(new SourcePrioritizedRunnable(Priority.HIGH, "_add_listener_") {
                @Override
                public void run() {
                    final NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
                    final NotifyTimeout previous = timeoutClusterStateListeners.put(listener, notifyTimeout);
                    assert previous == null : "Added same listener [" + listener + "]";
                    // Re-check shutdown: the service may have stopped between the
                    // outer check above and this task running on the applier thread.
                    if (lifecycle.stoppedOrClosed()) {
                        listener.onClose();
                        return;
                    }
                    if (timeout != null) {
                        // Schedule the timeout callback; doStop()/removeTimeoutListener()
                        // cancel it through the NotifyTimeout handle.
                        notifyTimeout.cancellable = threadPool.schedule(notifyTimeout, timeout, ThreadPool.Names.GENERIC);
                    }
                    listener.postAdded();
                }
            });
        } catch (EsRejectedExecutionException e) {
            // Executor already shut down: treat like a stopped service; anything else
            // rejecting work here is unexpected, so rethrow.
            if (lifecycle.stoppedOrClosed()) {
                listener.onClose();
            } else {
                throw e;
            }
        }
    }
public void runOnApplierThread(final String source, Consumer<ClusterState> clusterStateConsumer,
final ClusterApplyListener listener, Priority priority) {
submitStateUpdateTask(source, ClusterStateTaskConfig.build(priority),
(clusterState) -> {
clusterStateConsumer.accept(clusterState);
return clusterState;
},
listener);
}
public void runOnApplierThread(final String source, Consumer<ClusterState> clusterStateConsumer,
final ClusterApplyListener listener) {
runOnApplierThread(source, clusterStateConsumer, listener, Priority.HIGH);
}
@Override
public void onNewClusterState(final String source, final Supplier<ClusterState> clusterStateSupplier,
final ClusterApplyListener listener) {
Function<ClusterState, ClusterState> applyFunction = currentState -> {
ClusterState nextState = clusterStateSupplier.get();
if (nextState != null) {
return nextState;
} else {
return currentState;
}
};
submitStateUpdateTask(source, ClusterStateTaskConfig.build(Priority.HIGH), applyFunction, listener);
}
    /**
     * Queues a state-update task on the single prioritized applier executor.
     * Tasks submitted before start or after stop are silently dropped.
     */
    private void submitStateUpdateTask(final String source, final ClusterStateTaskConfig config,
                                       final Function<ClusterState, ClusterState> executor,
                                       final ClusterApplyListener listener) {
        if (!lifecycle.started()) {
            return;
        }
        try {
            // SafeClusterApplyListener shields us from exceptions thrown by the caller's listener.
            UpdateTask updateTask = new UpdateTask(config.priority(), source, new SafeClusterApplyListener(listener, LOGGER), executor);
            if (config.timeout() != null) {
                // If the task does not start running within the timeout, fail the listener
                // on a generic pool thread (never on the applier thread itself).
                threadPoolExecutor.execute(updateTask, config.timeout(),
                    () -> threadPool.generic().execute(
                        () -> listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source))));
            } else {
                threadPoolExecutor.execute(updateTask);
            }
        } catch (EsRejectedExecutionException e) {
            // ignore cases where we are shutting down..., there is really nothing interesting
            // to be done here...
            if (!lifecycle.stoppedOrClosed()) {
                throw e;
            }
        }
    }
/** asserts that the current thread is <b>NOT</b> the cluster state update thread */
public static boolean assertNotClusterStateUpdateThread(String reason) {
assert Thread.currentThread().getName().contains(CLUSTER_UPDATE_THREAD_NAME) == false :
"Expected current thread [" + Thread.currentThread() + "] to not be the cluster state update thread. Reason: [" + reason + "]";
return true;
}
    /** asserts that the current stack trace does <b>NOT</b> involve a cluster state applier */
    private static boolean assertNotCalledFromClusterStateApplier(String reason) {
        // Only worth checking when we are on the applier thread at all.
        if (Thread.currentThread().getName().contains(CLUSTER_UPDATE_THREAD_NAME)) {
            for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
                final String className = element.getClassName();
                final String methodName = element.getMethodName();
                if (className.equals(ClusterStateObserver.class.getName())) {
                    // people may start an observer from an applier
                    return true;
                } else if (className.equals(ClusterApplierService.class.getName())
                    && methodName.equals("callClusterStateAppliers")) {
                    throw new AssertionError("should not be called by a cluster state applier. reason [" + reason + "]");
                }
            }
        }
        return true;
    }
    /**
     * Executes one queued state-update task on the applier thread: computes the next
     * state, applies it if it differs from the current one, and notifies the task's
     * listener of success or failure. Never throws.
     */
    private void runTask(UpdateTask task) {
        if (!lifecycle.started()) {
            LOGGER.debug("processing [{}]: ignoring, cluster applier service not started", task.source);
            return;
        }
        LOGGER.debug("processing [{}]: execute", task.source);
        final ClusterState previousClusterState = state.get();
        long startTimeMS = currentTimeInMillis();
        final StopWatch stopWatch = new StopWatch();
        final ClusterState newClusterState;
        try {
            // Phase 1: compute the new state via the task's update function.
            try (Releasable ignored = stopWatch.timing("running task [" + task.source + ']')) {
                newClusterState = task.apply(previousClusterState);
            }
        } catch (Exception e) {
            TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
            LOGGER.trace(() -> new ParameterizedMessage(
                "failed to execute cluster state applier in [{}], state:\nversion [{}], source [{}]\n{}",
                executionTime, previousClusterState.version(), task.source, previousClusterState), e);
            warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch);
            task.listener.onFailure(task.source, e);
            return;
        }
        // Identity comparison is deliberate: an unchanged state object means nothing to apply.
        if (previousClusterState == newClusterState) {
            TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
            LOGGER.debug("processing [{}]: took [{}] no change in cluster state", task.source, executionTime);
            warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch);
            task.listener.onSuccess(task.source);
        } else {
            // NOTE(review): the trace-enabled check gating a debug-level log looks odd but
            // appears intentional — the full state is only printed when trace verbosity is on.
            if (LOGGER.isTraceEnabled()) {
                LOGGER.debug("cluster state updated, version [{}], source [{}]\n{}", newClusterState.version(), task.source,
                    newClusterState);
            } else {
                LOGGER.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), task.source);
            }
            try {
                // Phase 2: apply the new state (connect nodes, run appliers/listeners).
                applyChanges(task, previousClusterState, newClusterState, stopWatch);
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
                LOGGER.debug("processing [{}]: took [{}] done applying updated cluster state (version: {}, uuid: {})", task.source,
                    executionTime, newClusterState.version(),
                    newClusterState.stateUUID());
                warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch);
                task.listener.onSuccess(task.source);
            } catch (Exception e) {
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
                if (LOGGER.isTraceEnabled()) {
                    LOGGER.warn(new ParameterizedMessage(
                        "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}",
                        executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source, newClusterState), e);
                } else {
                    LOGGER.warn(new ParameterizedMessage(
                        "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]",
                        executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source), e);
                }
                // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we
                // continue we will retry with the same cluster state but that might not help.
                assert applicationMayFail();
                task.listener.onFailure(task.source, e);
            }
        }
    }
    /**
     * Applies a new cluster state in a strict order: connect to new nodes, apply dynamic
     * settings, run appliers, disconnect removed nodes, publish the state locally, then
     * notify listeners. The ordering is load-bearing — do not reorder these steps.
     */
    private void applyChanges(UpdateTask task, ClusterState previousClusterState, ClusterState newClusterState, StopWatch stopWatch) {
        ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(task.source, newClusterState, previousClusterState);
        // new cluster state, notify all listeners
        final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
        if (nodesDelta.hasChanges() && LOGGER.isInfoEnabled()) {
            String summary = nodesDelta.shortSummary();
            if (summary.length() > 0) {
                LOGGER.info("{}, term: {}, version: {}, reason: {}",
                    summary, newClusterState.term(), newClusterState.version(), task.source);
            }
        }
        // Connect to newly joined nodes before appliers run, so they can talk to them.
        LOGGER.trace("connecting to nodes of cluster state with version {}", newClusterState.version());
        try (Releasable ignored = stopWatch.timing("connecting to new nodes")) {
            connectToNodesAndWait(newClusterState);
        }
        // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency
        if (clusterChangedEvent.state().blocks().disableStatePersistence() == false && clusterChangedEvent.metadataChanged()) {
            LOGGER.debug("applying settings from cluster state with version {}", newClusterState.version());
            final Settings incomingSettings = clusterChangedEvent.state().metadata().settings();
            try (Releasable ignored = stopWatch.timing("applying settings")) {
                clusterSettings.applySettings(incomingSettings);
            }
        }
        LOGGER.debug("apply cluster state with version {}", newClusterState.version());
        callClusterStateAppliers(clusterChangedEvent, stopWatch);
        // Disconnect from nodes no longer in the cluster only after appliers have run.
        nodeConnectionsService.disconnectFromNodesExcept(newClusterState.nodes());
        LOGGER.debug("set locally applied cluster state to version {}", newClusterState.version());
        // Publish the new state locally; listeners observe it via state() from here on.
        state.set(newClusterState);
        callClusterStateListeners(clusterChangedEvent, stopWatch);
    }
protected void connectToNodesAndWait(ClusterState newClusterState) {
// can't wait for an ActionFuture on the cluster applier thread, but we do want to block the thread here, so use a CountDownLatch.
final CountDownLatch countDownLatch = new CountDownLatch(1);
nodeConnectionsService.connectToNodes(newClusterState.nodes(), countDownLatch::countDown);
try {
countDownLatch.await();
} catch (InterruptedException e) {
LOGGER.debug("interrupted while connecting to nodes, continuing", e);
Thread.currentThread().interrupt();
}
}
    // Runs all registered appliers in priority order: high, then normal, then low.
    // The ordering is part of the applier contract — do not reorder.
    private void callClusterStateAppliers(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch) {
        callClusterStateAppliers(clusterChangedEvent, stopWatch, highPriorityStateAppliers);
        callClusterStateAppliers(clusterChangedEvent, stopWatch, normalPriorityStateAppliers);
        callClusterStateAppliers(clusterChangedEvent, stopWatch, lowPriorityStateAppliers);
    }
private static void callClusterStateAppliers(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch,
Collection<ClusterStateApplier> clusterStateAppliers) {
for (ClusterStateApplier applier : clusterStateAppliers) {
LOGGER.trace("calling [{}] with change to version [{}]", applier, clusterChangedEvent.state().version());
try (Releasable ignored = stopWatch.timing("running applier [" + applier + "]")) {
applier.applyClusterState(clusterChangedEvent);
}
}
}
    // Notifies plain listeners first, then timeout listeners still awaiting removal.
    private void callClusterStateListeners(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch) {
        callClusterStateListener(clusterChangedEvent, stopWatch, clusterStateListeners);
        callClusterStateListener(clusterChangedEvent, stopWatch, timeoutClusterStateListeners.keySet());
    }
private void callClusterStateListener(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch,
Collection<? extends ClusterStateListener> listeners) {
for (ClusterStateListener listener : listeners) {
try {
LOGGER.trace("calling [{}] with change to version [{}]", listener, clusterChangedEvent.state().version());
try (Releasable ignored = stopWatch.timing("notifying listener [" + listener + "]")) {
listener.clusterChanged(clusterChangedEvent);
}
} catch (Exception ex) {
LOGGER.warn("failed to notify ClusterStateListener", ex);
}
}
}
    /**
     * Decorator around a {@link ClusterApplyListener} that catches and logs any
     * exception thrown by the wrapped listener, so a misbehaving listener cannot
     * break the applier thread.
     */
    private static class SafeClusterApplyListener implements ClusterApplyListener {
        private final ClusterApplyListener listener;
        private final Logger logger;
        SafeClusterApplyListener(ClusterApplyListener listener, Logger logger) {
            this.listener = listener;
            this.logger = logger;
        }
        @Override
        public void onFailure(String source, Exception e) {
            try {
                listener.onFailure(source, e);
            } catch (Exception inner) {
                // Keep the original failure attached so it is not lost in the log.
                inner.addSuppressed(e);
                logger.error(new ParameterizedMessage(
                    "exception thrown by listener notifying of failure from [{}]", source), inner);
            }
        }
        @Override
        public void onSuccess(String source) {
            try {
                listener.onSuccess(source);
            } catch (Exception e) {
                logger.error(new ParameterizedMessage(
                    "exception thrown by listener while notifying of cluster state processed from [{}]", source), e);
            }
        }
    }
private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source, StopWatch stopWatch) {
if (executionTime.getMillis() > slowTaskLoggingThreshold.getMillis()) {
LOGGER.warn("cluster state applier task [{}] took [{}] which is above the warn threshold of [{}]: {}", source, executionTime,
slowTaskLoggingThreshold, Arrays.stream(stopWatch.taskInfo())
.map(ti -> '[' + ti.getTaskName() + "] took [" + ti.getTime().millis() + "ms]").collect(Collectors.joining(", ")));
}
}
    /**
     * Scheduled task that fires {@code onTimeout} for a registered timeout listener
     * once its wait time elapses, or {@code onClose} if the service has stopped.
     * Cancelled when the listener is removed before the timeout fires.
     */
    private class NotifyTimeout implements Runnable {
        final TimeoutClusterStateListener listener;
        @Nullable
        final TimeValue timeout;
        // Set after scheduling; volatile because cancel() may run on another thread.
        volatile Scheduler.Cancellable cancellable;
        NotifyTimeout(TimeoutClusterStateListener listener, @Nullable TimeValue timeout) {
            this.listener = listener;
            this.timeout = timeout;
        }
        public void cancel() {
            if (cancellable != null) {
                cancellable.cancel();
            }
        }
        @Override
        public void run() {
            assert timeout != null : "This should only ever execute if there's an actual timeout set";
            if (cancellable != null && cancellable.isCancelled()) {
                return;
            }
            if (lifecycle.stoppedOrClosed()) {
                listener.onClose();
            } else {
                listener.onTimeout(this.timeout);
            }
            // note, we rely on the listener to remove itself in case of timeout if needed
        }
    }
    // this one is overridden in tests so we can control time
    /** Relative (monotonic) clock in milliseconds used for task timing. */
    protected long currentTimeInMillis() {
        return threadPool.relativeTimeInMillis();
    }
    // overridden by tests that need to check behaviour in the event of an application failure
    /** In production an applier failure trips the assert in runTask; tests may allow it. */
    protected boolean applicationMayFail() {
        return false;
    }
}
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.service;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateApplier;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateTaskConfig;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.NodeConnectionsService;
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import io.crate.common.unit.TimeValue;
public class ClusterApplierService extends AbstractLifecycleComponent implements ClusterApplier {
    private static final Logger LOGGER = LogManager.getLogger(ClusterApplierService.class);
    // Dynamic threshold above which a state application task is logged at WARN.
    public static final Setting<TimeValue> CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING =
        Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30),
            Setting.Property.Dynamic, Setting.Property.NodeScope);
    public static final String CLUSTER_UPDATE_THREAD_NAME = "clusterApplierService#updateTask";
    private final ClusterSettings clusterSettings;
    protected final ThreadPool threadPool;
    private volatile TimeValue slowTaskLoggingThreshold;
    // Single-threaded prioritized executor created in doStart(); runs all state application.
    private volatile PrioritizedEsThreadPoolExecutor threadPoolExecutor;
    /**
     * Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine
     */
    private final Collection<ClusterStateApplier> highPriorityStateAppliers = new CopyOnWriteArrayList<>();
    private final Collection<ClusterStateApplier> normalPriorityStateAppliers = new CopyOnWriteArrayList<>();
    private final Collection<ClusterStateApplier> lowPriorityStateAppliers = new CopyOnWriteArrayList<>();
    private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>();
    // Timeout listeners mapped to their pending NotifyTimeout task (cancelled on removal).
    private final Map<TimeoutClusterStateListener, NotifyTimeout> timeoutClusterStateListeners = new ConcurrentHashMap<>();
    private final AtomicReference<ClusterState> state; // last applied state
    private final String nodeName;
    private NodeConnectionsService nodeConnectionsService;
    /**
     * Creates the applier service; call {@link #setNodeConnectionsService} and
     * {@link #setInitialState} before starting it.
     */
    public ClusterApplierService(String nodeName, Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) {
        this.clusterSettings = clusterSettings;
        this.threadPool = threadPool;
        this.state = new AtomicReference<>();
        this.nodeName = nodeName;
        this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings);
        // Keep the threshold in sync with dynamic cluster settings updates.
        this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
            this::setSlowTaskLoggingThreshold);
    }
    // Settings-update callback for the slow-task warn threshold.
    private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) {
        this.slowTaskLoggingThreshold = slowTaskLoggingThreshold;
    }
    /** Injects the node connections service; must be called exactly once before start. */
    public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) {
        assert this.nodeConnectionsService == null : "nodeConnectionsService is already set";
        this.nodeConnectionsService = nodeConnectionsService;
    }
    /** Sets the initial applied state; only legal before the service is started. */
    @Override
    public void setInitialState(ClusterState initialState) {
        if (lifecycle.started()) {
            throw new IllegalStateException("can't set initial state when started");
        }
        assert state.get() == null : "state is already set";
        state.set(initialState);
    }
    @Override
    protected synchronized void doStart() {
        // Fail fast if the required collaborators were not injected before start.
        Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting");
        Objects.requireNonNull(state.get(), "please set initial state before starting");
        threadPoolExecutor = createThreadPoolExecutor();
    }
    /** Creates the single-threaded prioritized executor backing the applier thread. */
    protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() {
        return EsExecutors.newSinglePrioritizing(
            nodeName + "/" + CLUSTER_UPDATE_THREAD_NAME,
            daemonThreadFactory(nodeName, CLUSTER_UPDATE_THREAD_NAME),
            threadPool.scheduler());
    }
    /**
     * A queued state-update: wraps the update function together with its listener and
     * runs via {@link #runTask} on the applier thread.
     */
    class UpdateTask extends SourcePrioritizedRunnable implements Function<ClusterState, ClusterState> {
        final ClusterApplyListener listener;
        final Function<ClusterState, ClusterState> updateFunction;
        UpdateTask(Priority priority, String source, ClusterApplyListener listener,
                   Function<ClusterState, ClusterState> updateFunction) {
            super(priority, source);
            this.listener = listener;
            this.updateFunction = updateFunction;
        }
        @Override
        public ClusterState apply(ClusterState clusterState) {
            return updateFunction.apply(clusterState);
        }
        @Override
        public void run() {
            runTask(this);
        }
    }
    @Override
    protected synchronized void doStop() {
        // Cancel pending timeout notifications and tell each timeout listener we are closing.
        for (Map.Entry<TimeoutClusterStateListener, NotifyTimeout> onGoingTimeout : timeoutClusterStateListeners.entrySet()) {
            try {
                onGoingTimeout.getValue().cancel();
                onGoingTimeout.getKey().onClose();
            } catch (Exception ex) {
                LOGGER.debug("failed to notify listeners on shutdown", ex);
            }
        }
        ThreadPool.terminate(threadPoolExecutor, 10, TimeUnit.SECONDS);
    }
    @Override
    protected synchronized void doClose() {
        // Nothing to release beyond what doStop() already handled.
    }
    /** Exposes the node's thread pool to collaborators. */
    public ThreadPool threadPool() {
        return threadPool;
    }
    /**
     * The current cluster state.
     * Should be renamed to appliedClusterState
     */
    public ClusterState state() {
        // Appliers must not read the applied state while it is still being applied.
        assert assertNotCalledFromClusterStateApplier("the applied cluster state is not yet available");
        ClusterState clusterState = this.state.get();
        assert clusterState != null : "initial cluster state not set yet";
        return clusterState;
    }
    /**
     * Adds a high priority applier of updated cluster states.
     */
    public void addHighPriorityApplier(ClusterStateApplier applier) {
        highPriorityStateAppliers.add(applier);
    }
    /**
     * Adds an applier which will be called after all high priority and normal appliers have been called.
     */
    public void addLowPriorityApplier(ClusterStateApplier applier) {
        lowPriorityStateAppliers.add(applier);
    }
    /**
     * Adds a applier of updated cluster states (normal priority).
     */
    public void addStateApplier(ClusterStateApplier applier) {
        normalPriorityStateAppliers.add(applier);
    }
    /**
     * Removes an applier of updated cluster states.
     * Tries all three priority collections since the caller may not know which one holds it.
     */
    public void removeApplier(ClusterStateApplier applier) {
        normalPriorityStateAppliers.remove(applier);
        highPriorityStateAppliers.remove(applier);
        lowPriorityStateAppliers.remove(applier);
    }
    /**
     * Add a listener for updated cluster states
     */
    public void addListener(ClusterStateListener listener) {
        clusterStateListeners.add(listener);
    }
    /**
     * Removes a listener for updated cluster states.
     */
    public void removeListener(ClusterStateListener listener) {
        clusterStateListeners.remove(listener);
    }
    /**
     * Removes a timeout listener for updated cluster states.
     * Also cancels its pending timeout notification, if any.
     */
    public void removeTimeoutListener(TimeoutClusterStateListener listener) {
        final NotifyTimeout timeout = timeoutClusterStateListeners.remove(listener);
        if (timeout != null) {
            timeout.cancel();
        }
    }
    /**
     * Add a listener for on/off local node master events
     *
     * @param listener notified on every applied cluster state change; it is the
     *                 listener's job to detect master flips from the state
     */
    public void addLocalNodeMasterListener(LocalNodeMasterListener listener) {
        addListener(listener);
    }
/**
* Adds a cluster state listener that is expected to be removed during a short period of time.
* If provided, the listener will be notified once a specific time has elapsed.
*
* NOTE: the listener is not removed on timeout. This is the responsibility of the caller.
*/
public void addTimeoutListener(@Nullable final TimeValue timeout, final TimeoutClusterStateListener listener) {
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
return;
}
// call the post added notification on the same event thread
try {
threadPoolExecutor.execute(new SourcePrioritizedRunnable(Priority.HIGH, "_add_listener_") {
@Override
public void run() {
final NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
final NotifyTimeout previous = timeoutClusterStateListeners.put(listener, notifyTimeout);
assert previous == null : "Added same listener [" + listener + "]";
if (timeout != null) {
notifyTimeout.cancellable = threadPool.schedule(notifyTimeout, timeout, ThreadPool.Names.GENERIC);
}
listener.postAdded();
}
});
} catch (EsRejectedExecutionException e) {
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
} else {
throw e;
}
}
}
public void runOnApplierThread(final String source, Consumer<ClusterState> clusterStateConsumer,
final ClusterApplyListener listener, Priority priority) {
submitStateUpdateTask(source, ClusterStateTaskConfig.build(priority),
(clusterState) -> {
clusterStateConsumer.accept(clusterState);
return clusterState;
},
listener);
}
    /**
     * Convenience overload that runs the consumer on the applier thread with HIGH priority.
     */
    public void runOnApplierThread(final String source, Consumer<ClusterState> clusterStateConsumer,
                                   final ClusterApplyListener listener) {
        runOnApplierThread(source, clusterStateConsumer, listener, Priority.HIGH);
    }
@Override
public void onNewClusterState(final String source, final Supplier<ClusterState> clusterStateSupplier,
final ClusterApplyListener listener) {
Function<ClusterState, ClusterState> applyFunction = currentState -> {
ClusterState nextState = clusterStateSupplier.get();
if (nextState != null) {
return nextState;
} else {
return currentState;
}
};
submitStateUpdateTask(source, ClusterStateTaskConfig.build(Priority.HIGH), applyFunction, listener);
}
    /**
     * Queues a state-update task on the single prioritized applier executor.
     * Tasks submitted before start or after stop are silently dropped.
     */
    private void submitStateUpdateTask(final String source, final ClusterStateTaskConfig config,
                                       final Function<ClusterState, ClusterState> executor,
                                       final ClusterApplyListener listener) {
        if (!lifecycle.started()) {
            return;
        }
        try {
            // SafeClusterApplyListener shields us from exceptions thrown by the caller's listener.
            UpdateTask updateTask = new UpdateTask(config.priority(), source, new SafeClusterApplyListener(listener, LOGGER), executor);
            if (config.timeout() != null) {
                // If the task does not start running within the timeout, fail the listener
                // on a generic pool thread (never on the applier thread itself).
                threadPoolExecutor.execute(updateTask, config.timeout(),
                    () -> threadPool.generic().execute(
                        () -> listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source))));
            } else {
                threadPoolExecutor.execute(updateTask);
            }
        } catch (EsRejectedExecutionException e) {
            // ignore cases where we are shutting down..., there is really nothing interesting
            // to be done here...
            if (!lifecycle.stoppedOrClosed()) {
                throw e;
            }
        }
    }
/** asserts that the current thread is <b>NOT</b> the cluster state update thread */
public static boolean assertNotClusterStateUpdateThread(String reason) {
assert Thread.currentThread().getName().contains(CLUSTER_UPDATE_THREAD_NAME) == false :
"Expected current thread [" + Thread.currentThread() + "] to not be the cluster state update thread. Reason: [" + reason + "]";
return true;
}
    /** asserts that the current stack trace does <b>NOT</b> involve a cluster state applier */
    private static boolean assertNotCalledFromClusterStateApplier(String reason) {
        // Only worth checking when we are on the applier thread at all.
        if (Thread.currentThread().getName().contains(CLUSTER_UPDATE_THREAD_NAME)) {
            for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
                final String className = element.getClassName();
                final String methodName = element.getMethodName();
                if (className.equals(ClusterStateObserver.class.getName())) {
                    // people may start an observer from an applier
                    return true;
                } else if (className.equals(ClusterApplierService.class.getName())
                    && methodName.equals("callClusterStateAppliers")) {
                    throw new AssertionError("should not be called by a cluster state applier. reason [" + reason + "]");
                }
            }
        }
        return true;
    }
    /**
     * Executes one queued state-update task on the applier thread: computes the next
     * state, applies it if it differs from the current one, and notifies the task's
     * listener of success or failure. Never throws.
     */
    private void runTask(UpdateTask task) {
        if (!lifecycle.started()) {
            LOGGER.debug("processing [{}]: ignoring, cluster applier service not started", task.source);
            return;
        }
        LOGGER.debug("processing [{}]: execute", task.source);
        final ClusterState previousClusterState = state.get();
        long startTimeMS = currentTimeInMillis();
        final StopWatch stopWatch = new StopWatch();
        final ClusterState newClusterState;
        try {
            // Phase 1: compute the new state via the task's update function.
            try (Releasable ignored = stopWatch.timing("running task [" + task.source + ']')) {
                newClusterState = task.apply(previousClusterState);
            }
        } catch (Exception e) {
            TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
            LOGGER.trace(() -> new ParameterizedMessage(
                "failed to execute cluster state applier in [{}], state:\nversion [{}], source [{}]\n{}",
                executionTime, previousClusterState.version(), task.source, previousClusterState), e);
            warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch);
            task.listener.onFailure(task.source, e);
            return;
        }
        // Identity comparison is deliberate: an unchanged state object means nothing to apply.
        if (previousClusterState == newClusterState) {
            TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
            LOGGER.debug("processing [{}]: took [{}] no change in cluster state", task.source, executionTime);
            warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch);
            task.listener.onSuccess(task.source);
        } else {
            // NOTE(review): the trace-enabled check gating a debug-level log looks odd but
            // appears intentional — the full state is only printed when trace verbosity is on.
            if (LOGGER.isTraceEnabled()) {
                LOGGER.debug("cluster state updated, version [{}], source [{}]\n{}", newClusterState.version(), task.source,
                    newClusterState);
            } else {
                LOGGER.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), task.source);
            }
            try {
                // Phase 2: apply the new state (connect nodes, run appliers/listeners).
                applyChanges(task, previousClusterState, newClusterState, stopWatch);
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
                LOGGER.debug("processing [{}]: took [{}] done applying updated cluster state (version: {}, uuid: {})", task.source,
                    executionTime, newClusterState.version(),
                    newClusterState.stateUUID());
                warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch);
                task.listener.onSuccess(task.source);
            } catch (Exception e) {
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS));
                if (LOGGER.isTraceEnabled()) {
                    LOGGER.warn(new ParameterizedMessage(
                        "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}",
                        executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source, newClusterState), e);
                } else {
                    LOGGER.warn(new ParameterizedMessage(
                        "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]",
                        executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source), e);
                }
                // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we
                // continue we will retry with the same cluster state but that might not help.
                assert applicationMayFail();
                task.listener.onFailure(task.source, e);
            }
        }
    }
    /**
     * Applies a new cluster state in a strict order: connect to new nodes, apply dynamic
     * settings, run appliers, disconnect removed nodes, publish the state locally, then
     * notify listeners. The ordering is load-bearing — do not reorder these steps.
     */
    private void applyChanges(UpdateTask task, ClusterState previousClusterState, ClusterState newClusterState, StopWatch stopWatch) {
        ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(task.source, newClusterState, previousClusterState);
        // new cluster state, notify all listeners
        final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
        if (nodesDelta.hasChanges() && LOGGER.isInfoEnabled()) {
            String summary = nodesDelta.shortSummary();
            if (summary.length() > 0) {
                LOGGER.info("{}, term: {}, version: {}, reason: {}",
                    summary, newClusterState.term(), newClusterState.version(), task.source);
            }
        }
        // Connect to newly joined nodes before appliers run, so they can talk to them.
        LOGGER.trace("connecting to nodes of cluster state with version {}", newClusterState.version());
        try (Releasable ignored = stopWatch.timing("connecting to new nodes")) {
            connectToNodesAndWait(newClusterState);
        }
        // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency
        if (clusterChangedEvent.state().blocks().disableStatePersistence() == false && clusterChangedEvent.metadataChanged()) {
            LOGGER.debug("applying settings from cluster state with version {}", newClusterState.version());
            final Settings incomingSettings = clusterChangedEvent.state().metadata().settings();
            try (Releasable ignored = stopWatch.timing("applying settings")) {
                clusterSettings.applySettings(incomingSettings);
            }
        }
        LOGGER.debug("apply cluster state with version {}", newClusterState.version());
        callClusterStateAppliers(clusterChangedEvent, stopWatch);
        // Disconnect from nodes no longer in the cluster only after appliers have run.
        nodeConnectionsService.disconnectFromNodesExcept(newClusterState.nodes());
        LOGGER.debug("set locally applied cluster state to version {}", newClusterState.version());
        // Publish the new state locally; listeners observe it via state() from here on.
        state.set(newClusterState);
        callClusterStateListeners(clusterChangedEvent, stopWatch);
    }
protected void connectToNodesAndWait(ClusterState newClusterState) {
// can't wait for an ActionFuture on the cluster applier thread, but we do want to block the thread here, so use a CountDownLatch.
final CountDownLatch countDownLatch = new CountDownLatch(1);
nodeConnectionsService.connectToNodes(newClusterState.nodes(), countDownLatch::countDown);
try {
countDownLatch.await();
} catch (InterruptedException e) {
LOGGER.debug("interrupted while connecting to nodes, continuing", e);
Thread.currentThread().interrupt();
}
}
    // Runs all registered appliers in priority order: high, then normal, then low.
    // The ordering is part of the applier contract — do not reorder.
    private void callClusterStateAppliers(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch) {
        callClusterStateAppliers(clusterChangedEvent, stopWatch, highPriorityStateAppliers);
        callClusterStateAppliers(clusterChangedEvent, stopWatch, normalPriorityStateAppliers);
        callClusterStateAppliers(clusterChangedEvent, stopWatch, lowPriorityStateAppliers);
    }
private static void callClusterStateAppliers(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch,
Collection<ClusterStateApplier> clusterStateAppliers) {
for (ClusterStateApplier applier : clusterStateAppliers) {
LOGGER.trace("calling [{}] with change to version [{}]", applier, clusterChangedEvent.state().version());
try (Releasable ignored = stopWatch.timing("running applier [" + applier + "]")) {
applier.applyClusterState(clusterChangedEvent);
}
}
}
    // Notifies plain listeners first, then timeout listeners still awaiting removal.
    private void callClusterStateListeners(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch) {
        callClusterStateListener(clusterChangedEvent, stopWatch, clusterStateListeners);
        callClusterStateListener(clusterChangedEvent, stopWatch, timeoutClusterStateListeners.keySet());
    }
private void callClusterStateListener(ClusterChangedEvent clusterChangedEvent, StopWatch stopWatch,
Collection<? extends ClusterStateListener> listeners) {
for (ClusterStateListener listener : listeners) {
try {
LOGGER.trace("calling [{}] with change to version [{}]", listener, clusterChangedEvent.state().version());
try (Releasable ignored = stopWatch.timing("notifying listener [" + listener + "]")) {
listener.clusterChanged(clusterChangedEvent);
}
} catch (Exception ex) {
LOGGER.warn("failed to notify ClusterStateListener", ex);
}
}
}
    /**
     * Decorator around a {@link ClusterApplyListener} that catches and logs any
     * exception thrown by the wrapped listener, so a misbehaving listener cannot
     * break the applier thread.
     */
    private static class SafeClusterApplyListener implements ClusterApplyListener {
        private final ClusterApplyListener listener;
        private final Logger logger;
        SafeClusterApplyListener(ClusterApplyListener listener, Logger logger) {
            this.listener = listener;
            this.logger = logger;
        }
        @Override
        public void onFailure(String source, Exception e) {
            try {
                listener.onFailure(source, e);
            } catch (Exception inner) {
                // Keep the original failure attached so it is not lost in the log.
                inner.addSuppressed(e);
                logger.error(new ParameterizedMessage(
                    "exception thrown by listener notifying of failure from [{}]", source), inner);
            }
        }
        @Override
        public void onSuccess(String source) {
            try {
                listener.onSuccess(source);
            } catch (Exception e) {
                logger.error(new ParameterizedMessage(
                    "exception thrown by listener while notifying of cluster state processed from [{}]", source), e);
            }
        }
    }
/**
 * Logs a warning when an applier task exceeded the configured slow-task threshold,
 * including a per-sub-task timing breakdown from the stop watch.
 */
private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source, StopWatch stopWatch) {
if (executionTime.getMillis() <= slowTaskLoggingThreshold.getMillis()) {
return;
}
final String timingBreakdown = Arrays.stream(stopWatch.taskInfo())
.map(task -> '[' + task.getTaskName() + "] took [" + task.getTime().millis() + "ms]")
.collect(Collectors.joining(", "));
LOGGER.warn("cluster state applier task [{}] took [{}] which is above the warn threshold of [{}]: {}", source, executionTime,
slowTaskLoggingThreshold, timingBreakdown);
}
/**
 * Scheduled task that fires a {@link TimeoutClusterStateListener}'s timeout callback,
 * unless it has been cancelled first or the service has stopped/closed in the meantime.
 */
private class NotifyTimeout implements Runnable {
final TimeoutClusterStateListener listener;
@Nullable
final TimeValue timeout;
// volatile: presumably assigned by the scheduling thread after construction and read by
// the thread that runs this task — confirm the publication order with the scheduling code.
volatile Scheduler.Cancellable cancellable;
NotifyTimeout(TimeoutClusterStateListener listener, @Nullable TimeValue timeout) {
this.listener = listener;
this.timeout = timeout;
}
// Best-effort cancellation; a no-op if the task was never scheduled (cancellable unset).
public void cancel() {
if (cancellable != null) {
cancellable.cancel();
}
}
@Override
public void run() {
assert timeout != null : "This should only ever execute if there's an actual timeout set";
// Skip notification if this task was cancelled before it ran.
if (cancellable != null && cancellable.isCancelled()) {
return;
}
// When the service is shutting down, tell the listener it is being closed rather than
// reporting a timeout.
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
} else {
listener.onTimeout(this.timeout);
}
// note, we rely on the listener to remove itself in case of timeout if needed
}
}
// this one is overridden in tests so we can control time
/** Returns the current relative time in milliseconds, as provided by the thread pool. */
protected long currentTimeInMillis() {
return threadPool.relativeTimeInMillis();
}
// overridden by tests that need to check behaviour in the event of an application failure
/** Whether applying a cluster state is allowed to fail; always {@code false} in production. */
protected boolean applicationMayFail() {
return false;
}
}
| bp: Fix Race in ClusterApplierService Shutdown
https://github.com/elastic/elasticsearch/commit/106695bec8803cd78bb5073f48ff2b3b954c47ae
| server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java | bp: Fix Race in ClusterApplierService Shutdown |
|
Java | apache-2.0 | b0842b649d391fab94716ca9048542da9c86f0c2 | 0 | dannil/HttpDownloader,dannil/HttpDownloader | package com.github.dannil.httpdownloader.handler;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.github.dannil.httpdownloader.model.Download;
import com.github.dannil.httpdownloader.repository.DownloadRepository;
import com.github.dannil.httpdownloader.utility.FileUtility;
/**
* Middleware class that acts between the service layer and the persistence layer. This class
* makes sure that several downloads can be initiated at once, each one in a separate thread.
*
* <p>NOTE(review): this class is both a Spring {@code @Component} and a hand-rolled singleton.
* An instance obtained through {@link #getInstance()} is constructed with {@code new} and never
* passes through the Spring container, so its {@code @Autowired} fields would presumably remain
* null on that path — confirm which acquisition path callers actually use.
*
* @author Daniel Nilsson (daniel.nilsson @ dannils.se)
* @version 1.0.0
* @since 1.0.0
*/
@Component
public final class DownloadThreadHandler {
private final static Logger LOGGER = Logger.getLogger(DownloadThreadHandler.class.getName());
// Lazily created singleton instance (bypasses Spring; see class-level note).
private static DownloadThreadHandler downloadThreadHandlerInstance;
// Threads started by this handler. NOTE(review): entries are only removed by interrupt(),
// never when a thread finishes normally, so the list grows without bound.
private List<Thread> threads;
@Autowired
private DownloadSaveToDisk saveToDiskInstance;
@Autowired
private DownloadDeleteFromDisk deleteFromDiskInstance;
/** Returns the lazily created singleton instance; synchronized for thread-safe creation. */
public synchronized static DownloadThreadHandler getInstance() {
if (downloadThreadHandlerInstance == null) {
downloadThreadHandlerInstance = new DownloadThreadHandler();
}
return downloadThreadHandlerInstance;
}
private DownloadThreadHandler() {
this.threads = new LinkedList<Thread>();
}
/**
* Saves the given download to disk on a new thread named after the download's format.
*
* <p>NOTE(review): the shared saveToDiskInstance runnable is mutated via setDownload() and
* then started; two calls in quick succession can overwrite each other's download before the
* first thread reads it — possible race.
*
* @param download the download to save; must not be null
*/
public final synchronized void saveToDisk(final Download download) {
if (download == null) {
throw new IllegalArgumentException("Download can't be null");
}
this.saveToDiskInstance.setDownload(download);
final Thread t = new Thread(this.saveToDiskInstance, download.getFormat());
this.threads.add(t);
t.start();
}
/**
* Deletes the given download from disk on a new thread named after the download's format.
*
* <p>NOTE(review): shares the same mutate-then-start race as {@link #saveToDisk(Download)}.
*
* @param download the download to delete; must not be null
*/
public final synchronized void deleteFromDisk(final Download download) {
if (download == null) {
throw new IllegalArgumentException("Download can't be null");
}
this.deleteFromDiskInstance.setDownload(download);
final Thread t = new Thread(this.deleteFromDiskInstance, download.getFormat());
this.threads.add(t);
t.start();
}
/**
* Interrupts a thread with the specified name.
*
* @param threadName the name of the thread to interrupt
*/
public final synchronized void interrupt(final String threadName) {
for (final Thread t : this.threads) {
if (t.getName().equals(threadName)) {
LOGGER.info("Found thread " + threadName + ", interrupting...");
t.interrupt();
// Removing during the enhanced for-loop is safe only because we break immediately.
this.threads.remove(t);
break;
}
}
}
}
/**
 * Worker that fetches the file referenced by a {@code Download} to local disk and, on
 * success, stamps the end date and persists the updated entity.
 *
 * <p>NOTE(review): instances are shared Spring beans; {@code setDownload(...)} followed by
 * running this on a new thread is racy if two downloads are scheduled in quick succession —
 * the second call can overwrite {@code download} before the first thread reads it.
 */
@Component
class DownloadSaveToDisk implements Runnable {
private final static Logger LOGGER = Logger.getLogger(DownloadSaveToDisk.class.getName());
@Autowired
private DownloadRepository repository;
// The download to process; must be set via setDownload(...) before the thread runs.
private Download download;
private DownloadSaveToDisk() {
}
@Override
public void run() {
LOGGER.info("Trying to save download " + this.download.getFormat());
try {
final File file = FileUtility.getFileFromURL(this.download);
FileUtility.saveToDrive(file);
} catch (IOException e) {
// Bug fix: the failure used to be printed to stderr while the download was still
// stamped with an end date and persisted as if it had succeeded. Log and bail out
// instead, leaving the entity unmodified.
LOGGER.error("Failed to save download " + this.download.getFormat(), e);
return;
}
this.download.setEndDate(new DateTime());
this.repository.save(this.download);
}
/**
 * Sets the download to be processed on the next {@link #run()}.
 *
 * @param download the download to save
 */
public final void setDownload(final Download download) {
this.download = download;
}
}
/**
 * Worker that deletes a {@code Download}'s file from local disk.
 *
 * <p>NOTE(review): shares the same mutate-then-start race as {@code DownloadSaveToDisk} when
 * scheduled through the handler.
 */
@Component
class DownloadDeleteFromDisk implements Runnable {
// Bug fix: the logger was created for DownloadSaveToDisk.class (copy-paste error), which
// misattributed this class's log output.
private final static Logger LOGGER = Logger.getLogger(DownloadDeleteFromDisk.class.getName());
// The download to process; must be set via setDownload(...) before the thread runs.
private Download download;
private DownloadDeleteFromDisk() {
}
@Override
public void run() {
LOGGER.info("Trying to delete download " + this.download.getFormat());
final boolean isDeleted = FileUtility.deleteFromDrive(this.download);
if (!isDeleted) {
throw new RuntimeException("Couldn't delete download " + this.download);
}
}
/**
 * Sets the download to be processed on the next {@link #run()}.
 *
 * @param download the download to delete
 */
public final void setDownload(final Download download) {
this.download = download;
}
} | src/main/java/com/github/dannil/httpdownloader/handler/DownloadThreadHandler.java | package com.github.dannil.httpdownloader.handler;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.github.dannil.httpdownloader.model.Download;
import com.github.dannil.httpdownloader.repository.DownloadRepository;
import com.github.dannil.httpdownloader.utility.FileUtility;
/**
* Middleware class that acts between the service layer and the persistence layer. This class
* makes sure that several downloads can be initiated at once, each one in a separate thread.
*
* @author Daniel Nilsson (daniel.nilsson @ dannils.se)
* @version 1.0.0
* @since 1.0.0
*/
@Component
public final class DownloadThreadHandler {
// NOTE(review): this class is both a Spring @Component and a hand-rolled singleton. An
// instance obtained through getInstance() is constructed with `new` and never passes through
// the Spring container, so its @Autowired fields would presumably remain null on that path —
// confirm which acquisition path callers actually use.
private final static Logger LOGGER = Logger.getLogger(DownloadThreadHandler.class.getName());
// Lazily created singleton instance (bypasses Spring; see note above).
private static DownloadThreadHandler downloadThreadHandler;
// Threads started by this handler. NOTE(review): entries are only removed by interrupt(),
// never when a thread finishes normally, so the list grows without bound.
private List<Thread> threads;
// NOTE(review): these fields share their names with the saveToDisk/deleteFromDisk methods
// below, which makes the method bodies harder to read.
@Autowired
private DownloadSaveToDisk saveToDisk;
@Autowired
private DownloadDeleteFromDisk deleteFromDisk;
/** Returns the lazily created singleton instance; synchronized for thread-safe creation. */
public synchronized static DownloadThreadHandler getInstance() {
if (downloadThreadHandler == null) {
downloadThreadHandler = new DownloadThreadHandler();
}
return downloadThreadHandler;
}
private DownloadThreadHandler() {
this.threads = new LinkedList<Thread>();
}
/**
* Saves the given download to disk on a new thread named after the download's format.
*
* <p>NOTE(review): the shared saveToDisk runnable is mutated via setDownload() and then
* started; two calls in quick succession can overwrite each other's download before the
* first thread reads it — possible race.
*
* @param download the download to save; must not be null
*/
public final synchronized void saveToDisk(final Download download) {
if (download == null) {
throw new IllegalArgumentException("Download can't be null");
}
this.saveToDisk.setDownload(download);
final Thread t = new Thread(this.saveToDisk, download.getFormat());
this.threads.add(t);
t.start();
}
/**
* Deletes the given download from disk on a new thread named after the download's format.
*
* <p>NOTE(review): shares the same mutate-then-start race as saveToDisk(Download).
*
* @param download the download to delete; must not be null
*/
public final synchronized void deleteFromDisk(final Download download) {
if (download == null) {
throw new IllegalArgumentException("Download can't be null");
}
this.deleteFromDisk.setDownload(download);
final Thread t = new Thread(this.deleteFromDisk, download.getFormat());
this.threads.add(t);
t.start();
}
/**
* Interrupts a thread with the specified name.
*
* @param threadName the name of the thread to interrupt
*/
public final synchronized void interrupt(final String threadName) {
for (final Thread t : this.threads) {
if (t.getName().equals(threadName)) {
LOGGER.info("Found thread " + threadName + ", interrupting...");
t.interrupt();
// Removing during the enhanced for-loop is safe only because we break immediately.
this.threads.remove(t);
break;
}
}
}
}
/**
 * Worker that fetches the file referenced by a {@code Download} to local disk and, on
 * success, stamps the end date and persists the updated entity.
 *
 * <p>NOTE(review): instances are shared Spring beans; {@code setDownload(...)} followed by
 * running this on a new thread is racy if two downloads are scheduled in quick succession —
 * the second call can overwrite {@code download} before the first thread reads it.
 */
@Component
class DownloadSaveToDisk implements Runnable {
private final static Logger LOGGER = Logger.getLogger(DownloadSaveToDisk.class.getName());
@Autowired
private DownloadRepository repository;
// The download to process; must be set via setDownload(...) before the thread runs.
private Download download;
private DownloadSaveToDisk() {
}
@Override
public void run() {
LOGGER.info("Trying to save download " + this.download.getFormat());
try {
final File file = FileUtility.getFileFromURL(this.download);
FileUtility.saveToDrive(file);
} catch (IOException e) {
// Bug fix: the failure used to be printed to stderr while the download was still
// stamped with an end date and persisted as if it had succeeded. Log and bail out
// instead, leaving the entity unmodified.
LOGGER.error("Failed to save download " + this.download.getFormat(), e);
return;
}
this.download.setEndDate(new DateTime());
this.repository.save(this.download);
}
/**
 * Sets the download to be processed on the next {@link #run()}.
 *
 * @param download the download to save
 */
public final void setDownload(final Download download) {
this.download = download;
}
}
/**
 * Worker that deletes a {@code Download}'s file from local disk.
 *
 * <p>NOTE(review): shares the same mutate-then-start race as {@code DownloadSaveToDisk} when
 * scheduled through the handler.
 */
@Component
class DownloadDeleteFromDisk implements Runnable {
// Bug fix: the logger was created for DownloadSaveToDisk.class (copy-paste error), which
// misattributed this class's log output.
private final static Logger LOGGER = Logger.getLogger(DownloadDeleteFromDisk.class.getName());
// The download to process; must be set via setDownload(...) before the thread runs.
private Download download;
private DownloadDeleteFromDisk() {
}
@Override
public void run() {
LOGGER.info("Trying to delete download " + this.download.getFormat());
final boolean isDeleted = FileUtility.deleteFromDrive(this.download);
if (!isDeleted) {
throw new RuntimeException("Couldn't delete download " + this.download);
}
}
/**
 * Sets the download to be processed on the next {@link #run()}.
 *
 * @param download the download to delete
 */
public final void setDownload(final Download download) {
this.download = download;
}
} | Changed member variables name | src/main/java/com/github/dannil/httpdownloader/handler/DownloadThreadHandler.java | Changed member variables name |
|
Java | apache-2.0 | 1ff129fe67ba6b26c445c2f1a77b6bea8a0d11f0 | 0 | ravwojdyla/incubator-beam,robertwb/incubator-beam,wtanaka/beam,wtanaka/beam,chamikaramj/beam,josauder/AOP_incubator_beam,robertwb/incubator-beam,apache/beam,charlesccychen/beam,jbonofre/beam,staslev/beam,dhalperi/beam,peihe/incubator-beam,xsm110/Apache-Beam,lukecwik/incubator-beam,lukecwik/incubator-beam,lukecwik/incubator-beam,wangyum/beam,chamikaramj/beam,josauder/AOP_incubator_beam,markflyhigh/incubator-beam,eljefe6a/incubator-beam,rangadi/beam,charlesccychen/incubator-beam,lukecwik/incubator-beam,jasonkuster/beam,rangadi/incubator-beam,amarouni/incubator-beam,jasonkuster/beam,lukecwik/incubator-beam,robertwb/incubator-beam,iemejia/incubator-beam,peihe/incubator-beam,tgroh/incubator-beam,wangyum/beam,charlesccychen/incubator-beam,robertwb/incubator-beam,charlesccychen/beam,staslev/beam,staslev/incubator-beam,amarouni/incubator-beam,manuzhang/beam,mxm/incubator-beam,peihe/incubator-beam,rangadi/beam,vikkyrk/incubator-beam,lukecwik/incubator-beam,vikkyrk/incubator-beam,sammcveety/incubator-beam,apache/beam,mxm/incubator-beam,rangadi/beam,rangadi/incubator-beam,markflyhigh/incubator-beam,RyanSkraba/beam,sammcveety/incubator-beam,rangadi/incubator-beam,charlesccychen/beam,apache/beam,markflyhigh/incubator-beam,rangadi/beam,chamikaramj/beam,eljefe6a/incubator-beam,chamikaramj/beam,chamikaramj/beam,amitsela/beam,sammcveety/incubator-beam,markflyhigh/incubator-beam,apache/beam,manuzhang/beam,eljefe6a/incubator-beam,manuzhang/incubator-beam,tgroh/beam,rangadi/beam,apache/beam,charlesccychen/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,staslev/beam,chamikaramj/beam,robertwb/incubator-beam,charlesccychen/beam,apache/beam,rangadi/beam,wangyum/beam,jbonofre/beam,manuzhang/incubator-beam,robertwb/incubator-beam,vikkyrk/incubator-beam,markflyhigh/incubator-beam,apache/beam,RyanSkraba/beam,RyanSkraba/beam,chamikaramj/beam,wangyum/beam,staslev/incubator-beam,amitsela/beam,tgroh/bea
m,markflyhigh/incubator-beam,yk5/beam,RyanSkraba/beam,chamikaramj/beam,jbonofre/incubator-beam,robertwb/incubator-beam,jbonofre/incubator-beam,tgroh/incubator-beam,dhalperi/incubator-beam,amitsela/beam,apache/beam,ravwojdyla/incubator-beam,robertwb/incubator-beam,dhalperi/beam,xsm110/Apache-Beam,robertwb/incubator-beam,apache/beam,chamikaramj/incubator-beam,markflyhigh/incubator-beam,yk5/beam,lukecwik/incubator-beam,chamikaramj/beam,RyanSkraba/beam,charlesccychen/beam,amitsela/incubator-beam,jasonkuster/beam,RyanSkraba/beam,dhalperi/incubator-beam,jbonofre/beam,tgroh/beam,rangadi/beam,charlesccychen/incubator-beam,joshualitt/incubator-beam,apache/beam,yk5/beam,iemejia/incubator-beam,tgroh/beam,RyanSkraba/beam,chamikaramj/incubator-beam,apache/beam,ravwojdyla/incubator-beam,charlesccychen/beam,xsm110/Apache-Beam,jasonkuster/incubator-beam,jasonkuster/incubator-beam,chamikaramj/beam,wtanaka/beam,amitsela/incubator-beam,dhalperi/beam,manuzhang/beam,joshualitt/incubator-beam,jbonofre/beam,lukecwik/incubator-beam | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.bigtable;
import static org.junit.Assert.assertThat;
import com.google.bigtable.admin.v2.ColumnFamily;
import com.google.bigtable.admin.v2.CreateTableRequest;
import com.google.bigtable.admin.v2.DeleteTableRequest;
import com.google.bigtable.admin.v2.GetTableRequest;
import com.google.bigtable.admin.v2.Table;
import com.google.bigtable.v2.Mutation;
import com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.v2.Row;
import com.google.bigtable.v2.RowRange;
import com.google.bigtable.v2.RowSet;
import com.google.cloud.bigtable.config.BigtableOptions;
import com.google.cloud.bigtable.config.BigtableOptions.Builder;
import com.google.cloud.bigtable.config.CredentialOptions;
import com.google.cloud.bigtable.config.RetryOptions;
import com.google.cloud.bigtable.grpc.BigtableSession;
import com.google.cloud.bigtable.grpc.BigtableTableAdminClient;
import com.google.cloud.bigtable.grpc.scanner.ResultScanner;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * End-to-end tests of BigtableWrite.
 *
 * <p>Creates a uniquely named table, writes generated rows through {@code BigtableIO.write()},
 * then reads the table back with the raw Bigtable client and asserts the contents match.
 */
@RunWith(JUnit4.class)
public class BigtableWriteIT implements Serializable {
/**
* These tests require static instances because the writers go through a serialization step
* when executing the test and would not affect passed-in objects otherwise.
*/
private static final String COLUMN_FAMILY_NAME = "cf";
private static BigtableTestOptions options;
private BigtableOptions bigtableOptions;
private static BigtableSession session;
private static BigtableTableAdminClient tableAdminClient;
// Unique per-run table id so repeated or concurrent runs do not collide.
private final String tableId =
String.format("BigtableWriteIT-%tF-%<tH-%<tM-%<tS-%<tL", new Date());
@Before
public void setup() throws Exception {
PipelineOptionsFactory.register(BigtableTestOptions.class);
options = TestPipeline.testingPipelineOptions().as(BigtableTestOptions.class);
// RetryOptions streamingBatchSize must be explicitly set for getTableData()
RetryOptions.Builder retryOptionsBuilder = new RetryOptions.Builder();
retryOptionsBuilder.setStreamingBatchSize(
retryOptionsBuilder.build().getStreamingBufferSize() / 2);
// The options used by the pipeline stay credential-free; the credential is attached only
// to the session's private copy below, so it is never serialized with the pipeline.
bigtableOptions =
new Builder()
.setProjectId(options.getProjectId())
.setInstanceId(options.getInstanceId())
.setUserAgent("apache-beam-test")
.setRetryOptions(retryOptionsBuilder.build())
.build();
session =
new BigtableSession(
bigtableOptions
.toBuilder()
.setCredentialOptions(
CredentialOptions.credential(options.as(GcpOptions.class).getGcpCredential()))
.build());
tableAdminClient = session.getTableAdminClient();
}
/** Writes {@code numRows} generated rows through BigtableIO and verifies the table contents. */
@Test
public void testE2EBigtableWrite() throws Exception {
final String tableName = bigtableOptions.getInstanceName().toTableNameStr(tableId);
final String instanceName = bigtableOptions.getInstanceName().toString();
final int numRows = 1000;
final List<KV<ByteString, ByteString>> testData = generateTableData(numRows);
createEmptyTable(instanceName, tableId);
Pipeline p = Pipeline.create(options);
p.apply(CountingInput.upTo(numRows))
.apply(ParDo.of(new DoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
@ProcessElement
public void processElement(ProcessContext c) {
int index = c.element().intValue();
Iterable<Mutation> mutations =
ImmutableList.of(Mutation.newBuilder()
.setSetCell(
Mutation.SetCell.newBuilder()
.setValue(testData.get(index).getValue())
.setFamilyName(COLUMN_FAMILY_NAME))
.build());
c.output(KV.of(testData.get(index).getKey(), mutations));
}
}))
.apply(BigtableIO.write()
.withBigtableOptions(bigtableOptions)
.withTableId(tableId));
// NOTE(review): assumes run() blocks until the pipeline finishes for the test runner in
// use; otherwise the reads below could race the write — confirm.
p.run();
// Test number of column families and column family name equality
Table table = getTable(tableName);
assertThat(table.getColumnFamilies().keySet(), Matchers.hasSize(1));
assertThat(table.getColumnFamilies(), Matchers.hasKey(COLUMN_FAMILY_NAME));
// Test table data equality
List<KV<ByteString, ByteString>> tableData = getTableData(tableName);
assertThat(tableData, Matchers.containsInAnyOrder(testData.toArray()));
}
@After
public void tearDown() throws Exception {
// NOTE(review): if setup() failed before session/bigtableOptions were initialized, this
// may throw NPE — consider null guards.
final String tableName = bigtableOptions.getInstanceName().toTableNameStr(tableId);
deleteTable(tableName);
session.close();
}
////////////////////////////////////////////////////////////////////////////////////////////
/** Helper function to generate KV test data. */
private List<KV<ByteString, ByteString>> generateTableData(int numRows) {
List<KV<ByteString, ByteString>> testData = new ArrayList<>(numRows);
for (int i = 0; i < numRows; ++i) {
ByteString key = ByteString.copyFromUtf8(String.format("key%09d", i));
ByteString value = ByteString.copyFromUtf8(String.format("value%09d", i));
testData.add(KV.of(key, value));
}
return testData;
}
/** Helper function to create an empty table. */
private void createEmptyTable(String instanceName, String tableId) {
Table.Builder tableBuilder = Table.newBuilder();
Map<String, ColumnFamily> columnFamilies = tableBuilder.getMutableColumnFamilies();
columnFamilies.put(COLUMN_FAMILY_NAME, ColumnFamily.newBuilder().build());
CreateTableRequest.Builder createTableRequestBuilder = CreateTableRequest.newBuilder()
.setParent(instanceName)
.setTableId(tableId)
.setTable(tableBuilder.build());
tableAdminClient.createTable(createTableRequestBuilder.build());
}
/** Helper function to get a table. */
private Table getTable(String tableName) {
GetTableRequest.Builder getTableRequestBuilder = GetTableRequest.newBuilder()
.setName(tableName);
return tableAdminClient.getTable(getTableRequestBuilder.build());
}
/** Helper function to get a table's data. */
private List<KV<ByteString, ByteString>> getTableData(String tableName) throws IOException {
// Add empty range to avoid TARGET_NOT_SET error
RowRange range = RowRange.newBuilder()
.setStartKeyClosed(ByteString.EMPTY)
.setEndKeyOpen(ByteString.EMPTY)
.build();
RowSet rowSet = RowSet.newBuilder()
.addRowRanges(range)
.build();
ReadRowsRequest.Builder readRowsRequestBuilder = ReadRowsRequest.newBuilder()
.setTableName(tableName)
.setRows(rowSet);
ResultScanner<Row> scanner = session.getDataClient().readRows(readRowsRequestBuilder.build());
List<KV<ByteString, ByteString>> tableData = new ArrayList<>();
// Bug fix: close the scanner even when iteration throws, so the underlying stream is not
// leaked on a failed read.
try {
Row currentRow;
while ((currentRow = scanner.next()) != null) {
ByteString key = currentRow.getKey();
ByteString value = currentRow.getFamilies(0).getColumns(0).getCells(0).getValue();
tableData.add(KV.of(key, value));
}
} finally {
scanner.close();
}
return tableData;
}
/** Helper function to delete a table. */
private void deleteTable(String tableName) {
DeleteTableRequest.Builder deleteTableRequestBuilder = DeleteTableRequest.newBuilder()
.setName(tableName);
tableAdminClient.deleteTable(deleteTableRequestBuilder.build());
}
}
| sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.bigtable;
import static org.junit.Assert.assertThat;
import com.google.bigtable.admin.v2.ColumnFamily;
import com.google.bigtable.admin.v2.CreateTableRequest;
import com.google.bigtable.admin.v2.DeleteTableRequest;
import com.google.bigtable.admin.v2.GetTableRequest;
import com.google.bigtable.admin.v2.Table;
import com.google.bigtable.v2.Mutation;
import com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.v2.Row;
import com.google.bigtable.v2.RowRange;
import com.google.bigtable.v2.RowSet;
import com.google.cloud.bigtable.config.BigtableOptions;
import com.google.cloud.bigtable.config.CredentialOptions;
import com.google.cloud.bigtable.config.RetryOptions;
import com.google.cloud.bigtable.grpc.BigtableSession;
import com.google.cloud.bigtable.grpc.BigtableTableAdminClient;
import com.google.cloud.bigtable.grpc.scanner.ResultScanner;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* End-to-end tests of BigtableWrite.
*
* <p>Creates a uniquely named table, writes generated rows through BigtableIO.write(), then
* reads the table back with the raw Bigtable client and asserts the contents match.
*/
@RunWith(JUnit4.class)
public class BigtableWriteIT implements Serializable {
/**
* These tests require static instances because the writers go through a serialization step
* when executing the test and would not affect passed-in objects otherwise.
*/
private static final String COLUMN_FAMILY_NAME = "cf";
private static BigtableTestOptions options;
private BigtableOptions bigtableOptions;
private static BigtableSession session;
private static BigtableTableAdminClient tableAdminClient;
// Unique per-run table id so repeated or concurrent runs do not collide.
private final String tableId =
String.format("BigtableWriteIT-%tF-%<tH-%<tM-%<tS-%<tL", new Date());
@Before
public void setup() throws Exception {
PipelineOptionsFactory.register(BigtableTestOptions.class);
options = TestPipeline.testingPipelineOptions().as(BigtableTestOptions.class);
// RetryOptions streamingBatchSize must be explicitly set for getTableData()
RetryOptions.Builder retryOptionsBuilder = new RetryOptions.Builder();
retryOptionsBuilder.setStreamingBatchSize(
retryOptionsBuilder.build().getStreamingBufferSize() / 2);
// NOTE(review): these options carry CredentialOptions and are also handed to
// BigtableIO.write() in the test below; since pipeline options get serialized, confirm
// whether embedding credential-bearing options in the pipeline is intended.
BigtableOptions.Builder bigtableOptionsBuilder =
new BigtableOptions.Builder()
.setProjectId(options.getProjectId())
.setInstanceId(options.getInstanceId())
.setUserAgent("apache-beam-test")
.setRetryOptions(retryOptionsBuilder.build())
.setCredentialOptions(
CredentialOptions.credential(options.as(GcpOptions.class).getGcpCredential()));
bigtableOptions = bigtableOptionsBuilder.build();
session = new BigtableSession(bigtableOptions);
tableAdminClient = session.getTableAdminClient();
}
/** Writes numRows generated rows through BigtableIO and verifies the table contents. */
@Test
public void testE2EBigtableWrite() throws Exception {
final String tableName = bigtableOptions.getInstanceName().toTableNameStr(tableId);
final String instanceName = bigtableOptions.getInstanceName().toString();
final int numRows = 1000;
final List<KV<ByteString, ByteString>> testData = generateTableData(numRows);
createEmptyTable(instanceName, tableId);
Pipeline p = Pipeline.create(options);
p.apply(CountingInput.upTo(numRows))
.apply(ParDo.of(new DoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
@ProcessElement
public void processElement(ProcessContext c) {
int index = c.element().intValue();
Iterable<Mutation> mutations =
ImmutableList.of(Mutation.newBuilder()
.setSetCell(
Mutation.SetCell.newBuilder()
.setValue(testData.get(index).getValue())
.setFamilyName(COLUMN_FAMILY_NAME))
.build());
c.output(KV.of(testData.get(index).getKey(), mutations));
}
}))
.apply(BigtableIO.write()
.withBigtableOptions(bigtableOptions)
.withTableId(tableId));
// NOTE(review): assumes run() blocks until the pipeline finishes for the test runner in
// use; otherwise the reads below could race the write — confirm.
p.run();
// Test number of column families and column family name equality
Table table = getTable(tableName);
assertThat(table.getColumnFamilies().keySet(), Matchers.hasSize(1));
assertThat(table.getColumnFamilies(), Matchers.hasKey(COLUMN_FAMILY_NAME));
// Test table data equality
List<KV<ByteString, ByteString>> tableData = getTableData(tableName);
assertThat(tableData, Matchers.containsInAnyOrder(testData.toArray()));
}
@After
public void tearDown() throws Exception {
// NOTE(review): if setup() failed before session/bigtableOptions were initialized, this
// may throw NPE — consider null guards.
final String tableName = bigtableOptions.getInstanceName().toTableNameStr(tableId);
deleteTable(tableName);
session.close();
}
////////////////////////////////////////////////////////////////////////////////////////////
/** Helper function to generate KV test data. */
private List<KV<ByteString, ByteString>> generateTableData(int numRows) {
List<KV<ByteString, ByteString>> testData = new ArrayList<>(numRows);
for (int i = 0; i < numRows; ++i) {
ByteString key = ByteString.copyFromUtf8(String.format("key%09d", i));
ByteString value = ByteString.copyFromUtf8(String.format("value%09d", i));
testData.add(KV.of(key, value));
}
return testData;
}
/** Helper function to create an empty table. */
private void createEmptyTable(String instanceName, String tableId) {
Table.Builder tableBuilder = Table.newBuilder();
Map<String, ColumnFamily> columnFamilies = tableBuilder.getMutableColumnFamilies();
columnFamilies.put(COLUMN_FAMILY_NAME, ColumnFamily.newBuilder().build());
CreateTableRequest.Builder createTableRequestBuilder = CreateTableRequest.newBuilder()
.setParent(instanceName)
.setTableId(tableId)
.setTable(tableBuilder.build());
tableAdminClient.createTable(createTableRequestBuilder.build());
}
/** Helper function to get a table. */
private Table getTable(String tableName) {
GetTableRequest.Builder getTableRequestBuilder = GetTableRequest.newBuilder()
.setName(tableName);
return tableAdminClient.getTable(getTableRequestBuilder.build());
}
/** Helper function to get a table's data. */
private List<KV<ByteString, ByteString>> getTableData(String tableName) throws IOException {
// Add empty range to avoid TARGET_NOT_SET error
RowRange range = RowRange.newBuilder()
.setStartKeyClosed(ByteString.EMPTY)
.setEndKeyOpen(ByteString.EMPTY)
.build();
RowSet rowSet = RowSet.newBuilder()
.addRowRanges(range)
.build();
ReadRowsRequest.Builder readRowsRequestBuilder = ReadRowsRequest.newBuilder()
.setTableName(tableName)
.setRows(rowSet);
ResultScanner<Row> scanner = session.getDataClient().readRows(readRowsRequestBuilder.build());
Row currentRow;
List<KV<ByteString, ByteString>> tableData = new ArrayList<>();
while ((currentRow = scanner.next()) != null) {
ByteString key = currentRow.getKey();
ByteString value = currentRow.getFamilies(0).getColumns(0).getCells(0).getValue();
tableData.add(KV.of(key, value));
}
// NOTE(review): the scanner is not closed if iteration throws above — consider try/finally.
scanner.close();
return tableData;
}
/** Helper function to delete a table. */
private void deleteTable(String tableName) {
DeleteTableRequest.Builder deleteTableRequestBuilder = DeleteTableRequest.newBuilder()
.setName(tableName);
tableAdminClient.deleteTable(deleteTableRequestBuilder.build());
}
}
| This closes #1350
| sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java | This closes #1350 |
|
Java | apache-2.0 | 03724c5210644e0c8660a606d45d9a27f4962419 | 0 | SyncFree/SwiftCloud,SyncFree/SwiftCloud,SyncFree/SwiftCloud,SyncFree/SwiftCloud | package swift.crdt.operations;
import swift.clocks.CausalityClock;
import swift.clocks.Timestamp;
import swift.clocks.TripleTimestamp;
import swift.crdt.RegisterVersioned;
import swift.crdt.interfaces.CRDTOperation;
import swift.crdt.interfaces.Copyable;
/**
 * Update operation for a versioned register CRDT: carries the new value together with the
 * causality clock {@code c} that is handed to the register on apply.
 *
 * <p>NOTE(review): the no-arg constructor exists for kryo; depending on the serializer
 * configuration, renaming the fields may break wire compatibility — confirm before refactoring.
 */
public class RegisterUpdate<V extends Copyable> extends BaseOperation<RegisterVersioned<V>> {
// The value written by this update.
private V val;
// Clock passed to the register when this update is applied.
private CausalityClock c;
// required for kryo
public RegisterUpdate() {
}
public RegisterUpdate(TripleTimestamp ts, V val, CausalityClock c) {
super(ts);
this.val = val;
this.c = c;
}
/** Returns the value carried by this update. */
public V getVal() {
return this.val;
}
@Override
public void replaceDependentOpTimestamp(Timestamp oldTs, Timestamp newTs) {
// Rewrite the clock: if it referenced oldTs, replace that entry with newTs so the
// reference survives the timestamp re-assignment.
if (c.includes(oldTs)) {
c.drop(oldTs);
c.record(newTs);
}
}
@Override
public void applyTo(RegisterVersioned<V> register) {
register.update(val, getTimestamp(), c);
}
@Override
public CRDTOperation<RegisterVersioned<V>> withBaseTimestamp(Timestamp ts) {
// Rebase this operation onto a new base timestamp; the value and clock are shared with
// the new instance, not copied.
return new RegisterUpdate<V>(getTimestamp().withBaseTimestamp(ts), val, c);
}
}
| src/swift/crdt/operations/RegisterUpdate.java | package swift.crdt.operations;
import swift.clocks.CausalityClock;
import swift.clocks.Timestamp;
import swift.clocks.TripleTimestamp;
import swift.crdt.RegisterVersioned;
import swift.crdt.interfaces.CRDTOperation;
import swift.crdt.interfaces.Copyable;
/**
 * Update operation for a versioned register CRDT: carries the new value together with the
 * causality clock {@code c} that is handed to the register on apply.
 *
 * <p>NOTE(review): the no-arg constructor exists for kryo; depending on the serializer
 * configuration, renaming the fields may break wire compatibility — confirm before refactoring.
 */
public class RegisterUpdate<V extends Copyable> extends BaseOperation<RegisterVersioned<V>> {
// The value written by this update.
private V val;
// Clock passed to the register when this update is applied.
private CausalityClock c;
// required for kryo
public RegisterUpdate() {
}
public RegisterUpdate(TripleTimestamp ts, V val, CausalityClock c) {
super(ts);
this.val = val;
this.c = c;
}
/** Returns the value carried by this update. */
public V getVal() {
return this.val;
}
@Override
public void replaceDependentOpTimestamp(Timestamp oldTs, Timestamp newTs) {
// Insert does not rely on any timestamp dependency.
// NOTE(review): this comment looks copy-pasted from a set-insert operation. This class
// does carry a clock (c) that may reference oldTs, and applyTo() hands that clock to the
// register; leaving it unrewritten here looks inconsistent — confirm.
}
@Override
public void applyTo(RegisterVersioned<V> register) {
register.update(val, getTimestamp(), c);
}
@Override
public CRDTOperation<RegisterVersioned<V>> withBaseTimestamp(Timestamp ts) {
// Rebase this operation onto a new base timestamp; the value and clock are shared with
// the new instance, not copied.
return new RegisterUpdate<V>(getTimestamp().withBaseTimestamp(ts), val, c);
}
}
| RegisterUpdate fix: clock needs to be updated during timestamp rewriting
git-svn-id: a3fb842d882947c797cf684233f17bdc7f2c8277@412 049c2a90-bdf1-4f83-8191-3719d539f8e0
| src/swift/crdt/operations/RegisterUpdate.java | RegisterUpdate fix: clock needs to be updated during timestamp rewriting |
|
Java | apache-2.0 | 7c741f74af2faa8513f7e2955c5dc924560e94a3 | 0 | ebyhr/presto,ebyhr/presto,losipiuk/presto,ebyhr/presto,Praveen2112/presto,smartnews/presto,losipiuk/presto,losipiuk/presto,smartnews/presto,dain/presto,losipiuk/presto,dain/presto,ebyhr/presto,Praveen2112/presto,smartnews/presto,smartnews/presto,smartnews/presto,dain/presto,Praveen2112/presto,Praveen2112/presto,Praveen2112/presto,losipiuk/presto,ebyhr/presto,dain/presto,dain/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.iceberg;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Streams;
import io.trino.tempto.ProductTest;
import io.trino.tempto.query.QueryResult;
import org.assertj.core.api.Assertions;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.tempto.assertions.QueryAssert.Row;
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.ICEBERG;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.iceberg.TestIcebergSparkCompatibility.CreateMode.CREATE_TABLE_AND_INSERT;
import static io.trino.tests.product.iceberg.TestIcebergSparkCompatibility.CreateMode.CREATE_TABLE_AS_SELECT;
import static io.trino.tests.product.iceberg.TestIcebergSparkCompatibility.CreateMode.CREATE_TABLE_WITH_NO_DATA_AND_INSERT;
import static io.trino.tests.product.utils.QueryExecutors.onSpark;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.testng.Assert.assertTrue;
public class TestIcebergSparkCompatibility
extends ProductTest
{
// see spark-defaults.conf
private static final String SPARK_CATALOG = "iceberg_test";
private static final String TRINO_CATALOG = "iceberg";
private static final String TEST_SCHEMA_NAME = "default";
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "unsupportedStorageFormats")
public void testTrinoWithUnsupportedFileFormat(StorageFormat storageFormat)
{
String tableName = "test_trino_unsupported_file_format_" + storageFormat;
String trinoTableName = trinoTableName(tableName);
String sparkTableName = sparkTableName(tableName);
onSpark().executeQuery(format("CREATE TABLE %s (x bigint) USING ICEBERG TBLPROPERTIES ('write.format.default'='%s')", sparkTableName, storageFormat));
onSpark().executeQuery(format("INSERT INTO %s VALUES (42)", sparkTableName));
assertQueryFailure(() -> onTrino().executeQuery("SELECT * FROM " + trinoTableName))
.hasMessageMatching("Query failed \\(#\\w+\\):\\Q File format not supported for Iceberg: " + storageFormat);
assertQueryFailure(() -> onTrino().executeQuery(format("INSERT INTO %s VALUES (42)", trinoTableName)))
.hasMessageMatching("Query failed \\(#\\w+\\):\\Q File format not supported for Iceberg: " + storageFormat);
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testTrinoReadingSparkData(StorageFormat storageFormat)
{
String baseTableName = "test_trino_reading_primitive_types_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onSpark().executeQuery(format(
"CREATE TABLE %s (" +
" _string STRING" +
", _bigint BIGINT" +
", _integer INTEGER" +
", _real REAL" +
", _double DOUBLE" +
", _short_decimal decimal(8,2)" +
", _long_decimal decimal(38,19)" +
", _boolean BOOLEAN" +
", _timestamp TIMESTAMP" +
", _date DATE" +
") USING ICEBERG " +
"TBLPROPERTIES ('write.format.default'='%s')",
sparkTableName,
storageFormat));
// Validate queries on an empty table created by Spark
assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName("\"" + baseTableName + "$snapshots\"")))).hasNoRows();
assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName))).hasNoRows();
onSpark().executeQuery(format(
"INSERT INTO %s VALUES (" +
"'a_string'" +
", 1000000000000000" +
", 1000000000" +
", 10000000.123" +
", 100000000000.123" +
", CAST('123456.78' AS decimal(8,2))" +
", CAST('1234567890123456789.0123456789012345678' AS decimal(38,19))" +
", true" +
", TIMESTAMP '2020-06-28 14:16:00.456'" +
", DATE '1950-06-28'" +
")",
sparkTableName));
Row row = row(
"a_string",
1000000000000000L,
1000000000,
10000000.123F,
100000000000.123,
new BigDecimal("123456.78"),
new BigDecimal("1234567890123456789.0123456789012345678"),
true,
Timestamp.valueOf("2020-06-28 14:16:00.456"),
Date.valueOf("1950-06-28"));
assertThat(onSpark().executeQuery(
"SELECT " +
" _string" +
", _bigint" +
", _integer" +
", _real" +
", _double" +
", _short_decimal" +
", _long_decimal" +
", _boolean" +
", _timestamp" +
", _date" +
" FROM " + sparkTableName))
.containsOnly(row);
assertThat(onTrino().executeQuery(
"SELECT " +
" _string" +
", _bigint" +
", _integer" +
", _real" +
", _double" +
", _short_decimal" +
", _long_decimal" +
", _boolean" +
", CAST(_timestamp AS TIMESTAMP)" + // TODO test the value without a CAST from timestamp with time zone to timestamp
", _date" +
" FROM " + trinoTableName))
.containsOnly(row);
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "testSparkReadingTrinoDataDataProvider")
public void testSparkReadingTrinoData(StorageFormat storageFormat, CreateMode createMode)
{
String baseTableName = "test_spark_reading_primitive_types_" + storageFormat + "_" + createMode;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
String namedValues = "SELECT " +
" VARCHAR 'a_string' _string " +
", 1000000000000000 _bigint " +
", 1000000000 _integer " +
", REAL '10000000.123' _real " +
", DOUBLE '100000000000.123' _double " +
", DECIMAL '123456.78' _short_decimal " +
", DECIMAL '1234567890123456789.0123456789012345678' _long_decimal " +
", true _boolean " +
//", TIMESTAMP '2020-06-28 14:16:00.456' _timestamp " +
", TIMESTAMP '2021-08-03 08:32:21.123456 Europe/Warsaw' _timestamptz " +
", DATE '1950-06-28' _date " +
//", TIME '01:23:45.123456' _time " +
"";
switch (createMode) {
case CREATE_TABLE_AND_INSERT:
onTrino().executeQuery(format(
"CREATE TABLE %s (" +
" _string VARCHAR" +
", _bigint BIGINT" +
", _integer INTEGER" +
", _real REAL" +
", _double DOUBLE" +
", _short_decimal decimal(8,2)" +
", _long_decimal decimal(38,19)" +
", _boolean BOOLEAN" +
//", _timestamp TIMESTAMP" -- per https://iceberg.apache.org/spark-writes/ Iceberg's timestamp is currently not supported with Spark
", _timestamptz timestamp(6) with time zone" +
", _date DATE" +
//", _time time(6)" + -- per https://iceberg.apache.org/spark-writes/ Iceberg's time is currently not supported with Spark
") WITH (format = '%s')",
trinoTableName,
storageFormat));
onTrino().executeQuery(format("INSERT INTO %s %s", trinoTableName, namedValues));
break;
case CREATE_TABLE_AS_SELECT:
onTrino().executeQuery(format("CREATE TABLE %s AS %s", trinoTableName, namedValues));
break;
case CREATE_TABLE_WITH_NO_DATA_AND_INSERT:
onTrino().executeQuery(format("CREATE TABLE %s AS %s WITH NO DATA", trinoTableName, namedValues));
onTrino().executeQuery(format("INSERT INTO %s %s", trinoTableName, namedValues));
break;
default:
throw new UnsupportedOperationException("Unsupported create mode: " + createMode);
}
Row row = row(
"a_string",
1000000000000000L,
1000000000,
10000000.123F,
100000000000.123,
new BigDecimal("123456.78"),
new BigDecimal("1234567890123456789.0123456789012345678"),
true,
//"2020-06-28 14:16:00.456",
"2021-08-03 06:32:21.123456 UTC", // Iceberg's timestamptz stores point in time, without zone
"1950-06-28"
// "01:23:45.123456"
/**/);
assertThat(onTrino().executeQuery(
"SELECT " +
" _string" +
", _bigint" +
", _integer" +
", _real" +
", _double" +
", _short_decimal" +
", _long_decimal" +
", _boolean" +
// _timestamp OR CAST(_timestamp AS varchar)
", CAST(_timestamptz AS varchar)" +
", CAST(_date AS varchar)" +
//", CAST(_time AS varchar)" +
" FROM " + trinoTableName))
.containsOnly(row);
assertThat(onSpark().executeQuery(
"SELECT " +
" _string" +
", _bigint" +
", _integer" +
", _real" +
", _double" +
", _short_decimal" +
", _long_decimal" +
", _boolean" +
// _timestamp OR CAST(_timestamp AS string)
", CAST(_timestamptz AS string) || ' UTC'" + // Iceberg timestamptz is mapped to Spark timestamp and gets represented without time zone
", CAST(_date AS string)" +
// ", CAST(_time AS string)" +
" FROM " + sparkTableName))
.containsOnly(row);
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
@DataProvider
public Object[][] testSparkReadingTrinoDataDataProvider()
{
return Stream.of(storageFormats())
.map(array -> getOnlyElement(asList(array)))
.flatMap(storageFormat -> Stream.of(
new Object[] {storageFormat, CREATE_TABLE_AND_INSERT},
new Object[] {storageFormat, CREATE_TABLE_AS_SELECT},
new Object[] {storageFormat, CREATE_TABLE_WITH_NO_DATA_AND_INSERT}))
.toArray(Object[][]::new);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
public void testSparkCreatesTrinoDrops()
{
String baseTableName = "test_spark_creates_trino_drops";
onSpark().executeQuery(format("CREATE TABLE %s (_string STRING, _bigint BIGINT) USING ICEBERG", sparkTableName(baseTableName)));
onTrino().executeQuery("DROP TABLE " + trinoTableName(baseTableName));
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
public void testTrinoCreatesSparkDrops()
{
String baseTableName = "test_trino_creates_spark_drops";
onTrino().executeQuery(format("CREATE TABLE %s (_string VARCHAR, _bigint BIGINT)", trinoTableName(baseTableName)));
onSpark().executeQuery("DROP TABLE " + sparkTableName(baseTableName));
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testSparkReadsTrinoPartitionedTable(StorageFormat storageFormat)
{
String baseTableName = "test_spark_reads_trino_partitioned_table_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onTrino().executeQuery(format("CREATE TABLE %s (_string VARCHAR, _bigint BIGINT) WITH (partitioning = ARRAY['_string'], format = '%s')", trinoTableName, storageFormat));
onTrino().executeQuery(format("INSERT INTO %s VALUES ('a', 1001), ('b', 1002), ('c', 1003)", trinoTableName));
Row row = row("b", 1002);
String select = "SELECT * FROM %s WHERE _string = 'b'";
assertThat(onTrino().executeQuery(format(select, trinoTableName)))
.containsOnly(row);
assertThat(onSpark().executeQuery(format(select, sparkTableName)))
.containsOnly(row);
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testTrinoReadsSparkPartitionedTable(StorageFormat storageFormat)
{
String baseTableName = "test_trino_reads_spark_partitioned_table_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onSpark().executeQuery(format(
"CREATE TABLE %s (_string STRING, _bigint BIGINT) USING ICEBERG PARTITIONED BY (_string) TBLPROPERTIES ('write.format.default'='%s')",
sparkTableName,
storageFormat));
onSpark().executeQuery(format("INSERT INTO %s VALUES ('a', 1001), ('b', 1002), ('c', 1003)", sparkTableName));
Row row = row("b", 1002);
String select = "SELECT * FROM %s WHERE _string = 'b'";
assertThat(onSpark().executeQuery(format(select, sparkTableName)))
.containsOnly(row);
assertThat(onTrino().executeQuery(format(select, trinoTableName)))
.containsOnly(row);
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testTrinoReadingCompositeSparkData(StorageFormat storageFormat)
{
String baseTableName = "test_trino_reading_spark_composites_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onSpark().executeQuery(format("" +
"CREATE TABLE %s (" +
" doc_id string,\n" +
" info MAP<STRING, INT>,\n" +
" pets ARRAY<STRING>,\n" +
" user_info STRUCT<name:STRING, surname:STRING, age:INT, gender:STRING>)" +
" USING ICEBERG" +
" TBLPROPERTIES ('write.format.default'='%s')",
sparkTableName, storageFormat));
onSpark().executeQuery(format(
"INSERT INTO TABLE %s SELECT 'Doc213', map('age', 28, 'children', 3), array('Dog', 'Cat', 'Pig'), \n" +
"named_struct('name', 'Santa', 'surname', 'Claus','age', 1000,'gender', 'MALE')",
sparkTableName));
assertThat(onTrino().executeQuery("SELECT doc_id, info['age'], pets[2], user_info.surname FROM " + trinoTableName))
.containsOnly(row("Doc213", 28, "Cat", "Claus"));
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testSparkReadingCompositeTrinoData(StorageFormat storageFormat)
{
String baseTableName = "test_spark_reading_trino_composites_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onTrino().executeQuery(format(
"CREATE TABLE %s (" +
" doc_id VARCHAR,\n" +
" info MAP(VARCHAR, INTEGER),\n" +
" pets ARRAY(VARCHAR),\n" +
" user_info ROW(name VARCHAR, surname VARCHAR, age INTEGER, gender VARCHAR)) " +
" WITH (format = '%s')",
trinoTableName,
storageFormat));
onTrino().executeQuery(format(
"INSERT INTO %s VALUES('Doc213', MAP(ARRAY['age', 'children'], ARRAY[28, 3]), ARRAY['Dog', 'Cat', 'Pig'], ROW('Santa', 'Claus', 1000, 'MALE'))",
trinoTableName));
assertThat(onSpark().executeQuery("SELECT doc_id, info['age'], pets[1], user_info.surname FROM " + sparkTableName))
.containsOnly(row("Doc213", 28, "Cat", "Claus"));
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testTrinoReadingNestedSparkData(StorageFormat storageFormat)
{
String baseTableName = "test_trino_reading_nested_spark_data_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onSpark().executeQuery(format(
"CREATE TABLE %s (\n" +
" doc_id STRING\n" +
", nested_map MAP<STRING, ARRAY<STRUCT<sname: STRING, snumber: INT>>>\n" +
", nested_array ARRAY<MAP<STRING, ARRAY<STRUCT<mname: STRING, mnumber: INT>>>>\n" +
", nested_struct STRUCT<name:STRING, complicated: ARRAY<MAP<STRING, ARRAY<STRUCT<mname: STRING, mnumber: INT>>>>>)\n" +
" USING ICEBERG TBLPROPERTIES ('write.format.default'='%s')",
sparkTableName,
storageFormat));
onSpark().executeQuery(format(
"INSERT INTO TABLE %s SELECT" +
" 'Doc213'" +
", map('s1', array(named_struct('sname', 'ASName1', 'snumber', 201), named_struct('sname', 'ASName2', 'snumber', 202)))" +
", array(map('m1', array(named_struct('mname', 'MAS1Name1', 'mnumber', 301), named_struct('mname', 'MAS1Name2', 'mnumber', 302)))" +
" ,map('m2', array(named_struct('mname', 'MAS2Name1', 'mnumber', 401), named_struct('mname', 'MAS2Name2', 'mnumber', 402))))" +
", named_struct('name', 'S1'," +
" 'complicated', array(map('m1', array(named_struct('mname', 'SAMA1Name1', 'mnumber', 301), named_struct('mname', 'SAMA1Name2', 'mnumber', 302)))" +
" ,map('m2', array(named_struct('mname', 'SAMA2Name1', 'mnumber', 401), named_struct('mname', 'SAMA2Name2', 'mnumber', 402)))))",
sparkTableName));
Row row = row("Doc213", "ASName2", 201, "MAS2Name1", 302, "SAMA1Name1", 402);
assertThat(onSpark().executeQuery(
"SELECT" +
" doc_id" +
", nested_map['s1'][1].sname" +
", nested_map['s1'][0].snumber" +
", nested_array[1]['m2'][0].mname" +
", nested_array[0]['m1'][1].mnumber" +
", nested_struct.complicated[0]['m1'][0].mname" +
", nested_struct.complicated[1]['m2'][1].mnumber" +
" FROM " + sparkTableName))
.containsOnly(row);
assertThat(onTrino().executeQuery("SELECT" +
" doc_id" +
", nested_map['s1'][2].sname" +
", nested_map['s1'][1].snumber" +
", nested_array[2]['m2'][1].mname" +
", nested_array[1]['m1'][2].mnumber" +
", nested_struct.complicated[1]['m1'][1].mname" +
", nested_struct.complicated[2]['m2'][2].mnumber" +
" FROM " + trinoTableName))
.containsOnly(row);
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testSparkReadingNestedTrinoData(StorageFormat storageFormat)
{
String baseTableName = "test_spark_reading_nested_trino_data_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onTrino().executeQuery(format(
"CREATE TABLE %s (\n" +
" doc_id VARCHAR\n" +
", nested_map MAP(VARCHAR, ARRAY(ROW(sname VARCHAR, snumber INT)))\n" +
", nested_array ARRAY(MAP(VARCHAR, ARRAY(ROW(mname VARCHAR, mnumber INT))))\n" +
", nested_struct ROW(name VARCHAR, complicated ARRAY(MAP(VARCHAR, ARRAY(ROW(mname VARCHAR, mnumber INT))))))" +
" WITH (format = '%s')",
trinoTableName,
storageFormat));
onTrino().executeQuery(format(
"INSERT INTO %s SELECT" +
" 'Doc213'" +
", map(array['s1'], array[array[row('ASName1', 201), row('ASName2', 202)]])" +
", array[map(array['m1'], array[array[row('MAS1Name1', 301), row('MAS1Name2', 302)]])" +
" ,map(array['m2'], array[array[row('MAS2Name1', 401), row('MAS2Name2', 402)]])]" +
", row('S1'" +
" ,array[map(array['m1'], array[array[row('SAMA1Name1', 301), row('SAMA1Name2', 302)]])" +
" ,map(array['m2'], array[array[row('SAMA2Name1', 401), row('SAMA2Name2', 402)]])])",
trinoTableName));
Row row = row("Doc213", "ASName2", 201, "MAS2Name1", 302, "SAMA1Name1", 402);
assertThat(onTrino().executeQuery(
"SELECT" +
" doc_id" +
", nested_map['s1'][2].sname" +
", nested_map['s1'][1].snumber" +
", nested_array[2]['m2'][1].mname" +
", nested_array[1]['m1'][2].mnumber" +
", nested_struct.complicated[1]['m1'][1].mname" +
", nested_struct.complicated[2]['m2'][2].mnumber" +
" FROM " + trinoTableName))
.containsOnly(row);
QueryResult sparkResult = onSpark().executeQuery(
"SELECT" +
" doc_id" +
", nested_map['s1'][1].sname" +
", nested_map['s1'][0].snumber" +
", nested_array[1]['m2'][0].mname" +
", nested_array[0]['m1'][1].mnumber" +
", nested_struct.complicated[0]['m1'][0].mname" +
", nested_struct.complicated[1]['m2'][1].mnumber" +
" FROM " + sparkTableName);
assertThat(sparkResult).containsOnly(row);
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testIdBasedFieldMapping(StorageFormat storageFormat)
{
String baseTableName = "test_schema_evolution_for_nested_fields_" + storageFormat;
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onSpark().executeQuery(format(
"CREATE TABLE %s (_struct STRUCT<rename:BIGINT, keep:BIGINT, drop_and_add:BIGINT, CaseSensitive:BIGINT>, _partition BIGINT)"
+ " USING ICEBERG"
+ " partitioned by (_partition)"
+ " TBLPROPERTIES ('write.format.default' = '%s')",
sparkTableName,
storageFormat));
onSpark().executeQuery(format(
"INSERT INTO TABLE %s SELECT "
+ "named_struct('rename', 1, 'keep', 2, 'drop_and_add', 3, 'CaseSensitive', 4), "
+ "1001",
sparkTableName));
// Alter nested fields using Spark. Trino does not support this yet.
onSpark().executeQuery(format("ALTER TABLE %s RENAME COLUMN _struct.rename TO renamed", sparkTableName));
onSpark().executeQuery(format("ALTER TABLE %s DROP COLUMN _struct.drop_and_add", sparkTableName));
onSpark().executeQuery(format("ALTER TABLE %s ADD COLUMN _struct.drop_and_add BIGINT", sparkTableName));
if (storageFormat == StorageFormat.PARQUET) {
// TODO (https://github.com/trinodb/trino/issues/8750) the results should be the same for all storage formats
// TODO support Row (JAVA_OBJECT) in Tempto and switch to QueryAssert
Assertions.assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName)).rows())
.containsOnly(List.of(
rowBuilder()
// Rename does not change id
.addField("renamed", null)
.addField("keep", 2L)
.addField("CaseSensitive", 4L)
// Dropping and re-adding changes id
.addField("drop_and_add", 3L)
.build(),
1001L));
}
else {
// TODO support Row (JAVA_OBJECT) in Tempto and switch to QueryAssert
Assertions.assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName)).rows())
.containsOnly(List.of(
rowBuilder()
// Rename does not change id
.addField("renamed", 1L)
.addField("keep", 2L)
.addField("CaseSensitive", 4L)
// Dropping and re-adding changes id
.addField("drop_and_add", null)
.build(),
1001L));
}
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
public void testTrinoShowingSparkCreatedTables()
{
String sparkTable = "test_table_listing_for_spark";
String trinoTable = "test_table_listing_for_trino";
onSpark().executeQuery(format("CREATE TABLE %s (_integer INTEGER ) USING ICEBERG", sparkTableName(sparkTable)));
onTrino().executeQuery(format("CREATE TABLE %s (_integer INTEGER )", trinoTableName(trinoTable)));
assertThat(onTrino().executeQuery(format("SHOW TABLES FROM %s LIKE '%s'", TEST_SCHEMA_NAME, "test_table_listing_for_%")))
.containsOnly(row(sparkTable), row(trinoTable));
onSpark().executeQuery("DROP TABLE " + sparkTableName(sparkTable));
onTrino().executeQuery("DROP TABLE " + trinoTableName(trinoTable));
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
public void testTrinoWritingDataWithObjectStorageLocationProvider(StorageFormat storageFormat)
{
String baseTableName = "test_object_storage_location_provider_" + storageFormat;
String sparkTableName = sparkTableName(baseTableName);
String trinoTableName = trinoTableName(baseTableName);
String dataPath = "hdfs://hadoop-master:9000/user/hive/warehouse/test_object_storage_location_provider/obj-data";
onSpark().executeQuery(format("CREATE TABLE %s (_string STRING, _bigint BIGINT) USING ICEBERG TBLPROPERTIES (" +
"'write.object-storage.enabled'=true," +
"'write.object-storage.path'='%s'," +
"'write.format.default' = '%s')",
sparkTableName, dataPath, storageFormat));
onTrino().executeQuery(format("INSERT INTO %s VALUES ('a_string', 1000000000000000)", trinoTableName));
Row result = row("a_string", 1000000000000000L);
assertThat(onSpark().executeQuery(format("SELECT _string, _bigint FROM %s", sparkTableName))).containsOnly(result);
assertThat(onTrino().executeQuery(format("SELECT _string, _bigint FROM %s", trinoTableName))).containsOnly(result);
QueryResult queryResult = onTrino().executeQuery(format("SELECT file_path FROM %s", trinoTableName("\"" + baseTableName + "$files\"")));
assertThat(queryResult).hasRowsCount(1).hasColumnsCount(1);
assertTrue(((String) queryResult.row(0).get(0)).contains(dataPath));
// TODO: support path override in Iceberg table creation: https://github.com/trinodb/trino/issues/8861
assertQueryFailure(() -> onTrino().executeQuery("DROP TABLE " + trinoTableName))
.hasMessageContaining("contains Iceberg path override properties and cannot be dropped from Trino");
onSpark().executeQuery("DROP TABLE " + sparkTableName);
}
private static final List<String> SPECIAL_CHARACTER_VALUES = ImmutableList.of(
"with-hyphen",
"with.dot",
"with:colon",
"with/slash",
"with\\\\backslashes",
"with\\backslash",
"with=equal",
"with?question",
"with!exclamation",
"with%percent",
"with%%percents",
"with$dollar",
"with#hash",
"with*star",
"with=equals",
"with\"quote",
"with'apostrophe",
"with space",
" with space prefix",
"with space suffix ",
"with€euro",
"with non-ascii ąęłóść Θ Φ Δ",
"with����combining character",
" ����",
"���� ");
private static final String TRINO_INSERTED_PARTITION_VALUES =
Streams.mapWithIndex(SPECIAL_CHARACTER_VALUES.stream(), ((value, index) -> format("(%d, '%s')", index, escapeTrinoString(value))))
.collect(Collectors.joining(", "));
private static final String SPARK_INSERTED_PARTITION_VALUES =
Streams.mapWithIndex(SPECIAL_CHARACTER_VALUES.stream(), ((value, index) -> format("(%d, '%s')", index, escapeSparkString(value))))
.collect(Collectors.joining(", "));
private static final List<Row> EXPECTED_PARTITION_VALUES =
Streams.mapWithIndex(SPECIAL_CHARACTER_VALUES.stream(), ((value, index) -> row((int) index, value)))
.collect(toImmutableList());
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
public void testStringPartitioningWithSpecialCharactersCtasInTrino()
{
String baseTableName = "test_string_partitioning_with_special_chars_ctas_in_trino";
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onTrino().executeQuery("DROP TABLE IF EXISTS " + trinoTableName);
onTrino().executeQuery(format(
"CREATE TABLE %s (id, part_col) " +
"WITH (partitioning = ARRAY['part_col']) " +
"AS VALUES %s",
trinoTableName,
TRINO_INSERTED_PARTITION_VALUES));
assertSelectsOnSpecialCharacters(trinoTableName, sparkTableName);
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
public void testStringPartitioningWithSpecialCharactersInsertInTrino()
{
String baseTableName = "test_string_partitioning_with_special_chars_ctas_in_trino";
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onTrino().executeQuery("DROP TABLE IF EXISTS " + trinoTableName);
onTrino().executeQuery(format(
"CREATE TABLE %s (id BIGINT, part_col VARCHAR) WITH (partitioning = ARRAY['part_col'])",
trinoTableName));
onTrino().executeQuery(format("INSERT INTO %s VALUES %s", trinoTableName, TRINO_INSERTED_PARTITION_VALUES));
assertSelectsOnSpecialCharacters(trinoTableName, sparkTableName);
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
@Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
public void testStringPartitioningWithSpecialCharactersInsertInSpark()
{
String baseTableName = "test_string_partitioning_with_special_chars_ctas_in_spark";
String trinoTableName = trinoTableName(baseTableName);
String sparkTableName = sparkTableName(baseTableName);
onTrino().executeQuery("DROP TABLE IF EXISTS " + trinoTableName);
onTrino().executeQuery(format(
"CREATE TABLE %s (id BIGINT, part_col VARCHAR) WITH (partitioning = ARRAY['part_col'])",
trinoTableName));
onSpark().executeQuery(format("INSERT INTO %s VALUES %s", sparkTableName, SPARK_INSERTED_PARTITION_VALUES));
assertSelectsOnSpecialCharacters(trinoTableName, sparkTableName);
onTrino().executeQuery("DROP TABLE " + trinoTableName);
}
private void assertSelectsOnSpecialCharacters(String trinoTableName, String sparkTableName)
{
assertThat(onSpark().executeQuery("SELECT * FROM " + sparkTableName)).containsOnly(EXPECTED_PARTITION_VALUES);
assertThat(onTrino().executeQuery("SELECT * FROM " + trinoTableName)).containsOnly(EXPECTED_PARTITION_VALUES);
for (String value : SPECIAL_CHARACTER_VALUES) {
String trinoValue = escapeTrinoString(value);
String sparkValue = escapeSparkString(value);
// Ensure Trino written metadata is readable from Spark and vice versa
assertThat(onSpark().executeQuery("SELECT count(*) FROM " + sparkTableName + " WHERE part_col = '" + sparkValue + "'"))
.withFailMessage("Spark query with predicate containing '" + value + "' contained no matches, expected one")
.containsOnly(row(1));
assertThat(onTrino().executeQuery("SELECT count(*) FROM " + trinoTableName + " WHERE part_col = '" + trinoValue + "'"))
.withFailMessage("Trino query with predicate containing '" + value + "' contained no matches, expected one")
.containsOnly(row(1));
}
}
private static String escapeSparkString(String value)
{
return value.replace("\\", "\\\\").replace("'", "\\'");
}
private static String escapeTrinoString(String value)
{
return value.replace("'", "''");
}
private static String sparkTableName(String tableName)
{
return format("%s.%s.%s", SPARK_CATALOG, TEST_SCHEMA_NAME, tableName);
}
private static String trinoTableName(String tableName)
{
return format("%s.%s.%s", TRINO_CATALOG, TEST_SCHEMA_NAME, tableName);
}
private io.trino.jdbc.Row.Builder rowBuilder()
{
return io.trino.jdbc.Row.builder();
}
@DataProvider
public static Object[][] storageFormats()
{
return Stream.of(StorageFormat.values())
.filter(StorageFormat::isSupportedInTrino)
.map(storageFormat -> new Object[] {storageFormat})
.toArray(Object[][]::new);
}
@DataProvider
public static Object[][] unsupportedStorageFormats()
{
return Stream.of(StorageFormat.values())
.filter(storageFormat -> !storageFormat.isSupportedInTrino())
.map(storageFormat -> new Object[] {storageFormat})
.toArray(Object[][]::new);
}
public enum StorageFormat
{
PARQUET,
ORC,
AVRO,
/**/;
public boolean isSupportedInTrino()
{
// TODO (https://github.com/trinodb/trino/issues/1324) not supported in Trino yet
// - remove testTrinoWithUnsupportedFileFormat once all formats are supported
return this != AVRO;
}
}
public enum CreateMode
{
CREATE_TABLE_AND_INSERT,
CREATE_TABLE_AS_SELECT,
CREATE_TABLE_WITH_NO_DATA_AND_INSERT,
}
}
| testing/trino-product-tests/src/main/java/io/trino/tests/product/iceberg/TestIcebergSparkCompatibility.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.iceberg;
import io.trino.tempto.ProductTest;
import io.trino.tempto.query.QueryResult;
import org.assertj.core.api.Assertions;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.List;
import java.util.stream.Stream;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.tempto.assertions.QueryAssert.Row;
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.ICEBERG;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.iceberg.TestIcebergSparkCompatibility.CreateMode.CREATE_TABLE_AND_INSERT;
import static io.trino.tests.product.iceberg.TestIcebergSparkCompatibility.CreateMode.CREATE_TABLE_AS_SELECT;
import static io.trino.tests.product.iceberg.TestIcebergSparkCompatibility.CreateMode.CREATE_TABLE_WITH_NO_DATA_AND_INSERT;
import static io.trino.tests.product.utils.QueryExecutors.onSpark;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.testng.Assert.assertTrue;
/**
 * Product tests for Trino/Spark interoperability on Iceberg tables: each test writes data
 * (or DDL) with one engine and verifies the result from the other engine, across the
 * storage formats supplied by the data providers below.
 */
public class TestIcebergSparkCompatibility
        extends ProductTest
{
    // see spark-defaults.conf
    private static final String SPARK_CATALOG = "iceberg_test";
    private static final String TRINO_CATALOG = "iceberg";
    private static final String TEST_SCHEMA_NAME = "default";
    // Verifies that Trino rejects both reads and writes on a Spark-created table whose
    // format Trino does not support yet (see StorageFormat.isSupportedInTrino).
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "unsupportedStorageFormats")
    public void testTrinoWithUnsupportedFileFormat(StorageFormat storageFormat)
    {
        String tableName = "test_trino_unsupported_file_format_" + storageFormat;
        String trinoTableName = trinoTableName(tableName);
        String sparkTableName = sparkTableName(tableName);
        onSpark().executeQuery(format("CREATE TABLE %s (x bigint) USING ICEBERG TBLPROPERTIES ('write.format.default'='%s')", sparkTableName, storageFormat));
        onSpark().executeQuery(format("INSERT INTO %s VALUES (42)", sparkTableName));
        assertQueryFailure(() -> onTrino().executeQuery("SELECT * FROM " + trinoTableName))
                .hasMessageMatching("Query failed \\(#\\w+\\):\\Q File format not supported for Iceberg: " + storageFormat);
        assertQueryFailure(() -> onTrino().executeQuery(format("INSERT INTO %s VALUES (42)", trinoTableName)))
                .hasMessageMatching("Query failed \\(#\\w+\\):\\Q File format not supported for Iceberg: " + storageFormat);
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // Spark writes one row covering the primitive types; Trino must read the same values back.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testTrinoReadingSparkData(StorageFormat storageFormat)
    {
        String baseTableName = "test_trino_reading_primitive_types_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onSpark().executeQuery(format(
                "CREATE TABLE %s (" +
                        "  _string STRING" +
                        ", _bigint BIGINT" +
                        ", _integer INTEGER" +
                        ", _real REAL" +
                        ", _double DOUBLE" +
                        ", _short_decimal decimal(8,2)" +
                        ", _long_decimal decimal(38,19)" +
                        ", _boolean BOOLEAN" +
                        ", _timestamp TIMESTAMP" +
                        ", _date DATE" +
                        ") USING ICEBERG " +
                        "TBLPROPERTIES ('write.format.default'='%s')",
                sparkTableName,
                storageFormat));
        // Validate queries on an empty table created by Spark
        assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName("\"" + baseTableName + "$snapshots\"")))).hasNoRows();
        assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName))).hasNoRows();
        onSpark().executeQuery(format(
                "INSERT INTO %s VALUES (" +
                        "'a_string'" +
                        ", 1000000000000000" +
                        ", 1000000000" +
                        ", 10000000.123" +
                        ", 100000000000.123" +
                        ", CAST('123456.78' AS decimal(8,2))" +
                        ", CAST('1234567890123456789.0123456789012345678' AS decimal(38,19))" +
                        ", true" +
                        ", TIMESTAMP '2020-06-28 14:16:00.456'" +
                        ", DATE '1950-06-28'" +
                        ")",
                sparkTableName));
        // Expected values, as JDBC types, for both the Spark and the Trino read below
        Row row = row(
                "a_string",
                1000000000000000L,
                1000000000,
                10000000.123F,
                100000000000.123,
                new BigDecimal("123456.78"),
                new BigDecimal("1234567890123456789.0123456789012345678"),
                true,
                Timestamp.valueOf("2020-06-28 14:16:00.456"),
                Date.valueOf("1950-06-28"));
        assertThat(onSpark().executeQuery(
                "SELECT " +
                        "  _string" +
                        ", _bigint" +
                        ", _integer" +
                        ", _real" +
                        ", _double" +
                        ", _short_decimal" +
                        ", _long_decimal" +
                        ", _boolean" +
                        ", _timestamp" +
                        ", _date" +
                        " FROM " + sparkTableName))
                .containsOnly(row);
        assertThat(onTrino().executeQuery(
                "SELECT " +
                        "  _string" +
                        ", _bigint" +
                        ", _integer" +
                        ", _real" +
                        ", _double" +
                        ", _short_decimal" +
                        ", _long_decimal" +
                        ", _boolean" +
                        ", CAST(_timestamp AS TIMESTAMP)" + // TODO test the value without a CAST from timestamp with time zone to timestamp
                        ", _date" +
                        " FROM " + trinoTableName))
                .containsOnly(row);
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // Trino writes one row covering the primitive types (via the given CreateMode);
    // Spark must read the same values back.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "testSparkReadingTrinoDataDataProvider")
    public void testSparkReadingTrinoData(StorageFormat storageFormat, CreateMode createMode)
    {
        String baseTableName = "test_spark_reading_primitive_types_" + storageFormat + "_" + createMode;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        String namedValues = "SELECT " +
                "  VARCHAR 'a_string' _string " +
                ", 1000000000000000 _bigint " +
                ", 1000000000 _integer " +
                ", REAL '10000000.123' _real " +
                ", DOUBLE '100000000000.123' _double " +
                ", DECIMAL '123456.78' _short_decimal " +
                ", DECIMAL '1234567890123456789.0123456789012345678' _long_decimal " +
                ", true _boolean " +
                //", TIMESTAMP '2020-06-28 14:16:00.456' _timestamp " +
                ", TIMESTAMP '2021-08-03 08:32:21.123456 Europe/Warsaw' _timestamptz " +
                ", DATE '1950-06-28' _date " +
                //", TIME '01:23:45.123456' _time " +
                "";
        switch (createMode) {
            case CREATE_TABLE_AND_INSERT:
                onTrino().executeQuery(format(
                        "CREATE TABLE %s (" +
                                "  _string VARCHAR" +
                                ", _bigint BIGINT" +
                                ", _integer INTEGER" +
                                ", _real REAL" +
                                ", _double DOUBLE" +
                                ", _short_decimal decimal(8,2)" +
                                ", _long_decimal decimal(38,19)" +
                                ", _boolean BOOLEAN" +
                                //", _timestamp TIMESTAMP" -- per https://iceberg.apache.org/spark-writes/ Iceberg's timestamp is currently not supported with Spark
                                ", _timestamptz timestamp(6) with time zone" +
                                ", _date DATE" +
                                //", _time time(6)" + -- per https://iceberg.apache.org/spark-writes/ Iceberg's time is currently not supported with Spark
                                ") WITH (format = '%s')",
                        trinoTableName,
                        storageFormat));
                onTrino().executeQuery(format("INSERT INTO %s %s", trinoTableName, namedValues));
                break;
            case CREATE_TABLE_AS_SELECT:
                onTrino().executeQuery(format("CREATE TABLE %s AS %s", trinoTableName, namedValues));
                break;
            case CREATE_TABLE_WITH_NO_DATA_AND_INSERT:
                onTrino().executeQuery(format("CREATE TABLE %s AS %s WITH NO DATA", trinoTableName, namedValues));
                onTrino().executeQuery(format("INSERT INTO %s %s", trinoTableName, namedValues));
                break;
            default:
                throw new UnsupportedOperationException("Unsupported create mode: " + createMode);
        }
        Row row = row(
                "a_string",
                1000000000000000L,
                1000000000,
                10000000.123F,
                100000000000.123,
                new BigDecimal("123456.78"),
                new BigDecimal("1234567890123456789.0123456789012345678"),
                true,
                //"2020-06-28 14:16:00.456",
                "2021-08-03 06:32:21.123456 UTC", // Iceberg's timestamptz stores point in time, without zone
                "1950-06-28"
                // "01:23:45.123456"
                /**/);
        assertThat(onTrino().executeQuery(
                "SELECT " +
                        "  _string" +
                        ", _bigint" +
                        ", _integer" +
                        ", _real" +
                        ", _double" +
                        ", _short_decimal" +
                        ", _long_decimal" +
                        ", _boolean" +
                        // _timestamp OR CAST(_timestamp AS varchar)
                        ", CAST(_timestamptz AS varchar)" +
                        ", CAST(_date AS varchar)" +
                        //", CAST(_time AS varchar)" +
                        " FROM " + trinoTableName))
                .containsOnly(row);
        assertThat(onSpark().executeQuery(
                "SELECT " +
                        "  _string" +
                        ", _bigint" +
                        ", _integer" +
                        ", _real" +
                        ", _double" +
                        ", _short_decimal" +
                        ", _long_decimal" +
                        ", _boolean" +
                        // _timestamp OR CAST(_timestamp AS string)
                        ", CAST(_timestamptz AS string) || ' UTC'" + // Iceberg timestamptz is mapped to Spark timestamp and gets represented without time zone
                        ", CAST(_date AS string)" +
                        // ", CAST(_time AS string)" +
                        " FROM " + sparkTableName))
                .containsOnly(row);
        onTrino().executeQuery("DROP TABLE " + trinoTableName);
    }
    // Cross product of every Trino-supported storage format with every CreateMode.
    @DataProvider
    public Object[][] testSparkReadingTrinoDataDataProvider()
    {
        return Stream.of(storageFormats())
                .map(array -> getOnlyElement(asList(array)))
                .flatMap(storageFormat -> Stream.of(
                        new Object[] {storageFormat, CREATE_TABLE_AND_INSERT},
                        new Object[] {storageFormat, CREATE_TABLE_AS_SELECT},
                        new Object[] {storageFormat, CREATE_TABLE_WITH_NO_DATA_AND_INSERT}))
                .toArray(Object[][]::new);
    }
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
    public void testSparkCreatesTrinoDrops()
    {
        String baseTableName = "test_spark_creates_trino_drops";
        onSpark().executeQuery(format("CREATE TABLE %s (_string STRING, _bigint BIGINT) USING ICEBERG", sparkTableName(baseTableName)));
        onTrino().executeQuery("DROP TABLE " + trinoTableName(baseTableName));
    }
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
    public void testTrinoCreatesSparkDrops()
    {
        String baseTableName = "test_trino_creates_spark_drops";
        onTrino().executeQuery(format("CREATE TABLE %s (_string VARCHAR, _bigint BIGINT)", trinoTableName(baseTableName)));
        onSpark().executeQuery("DROP TABLE " + sparkTableName(baseTableName));
    }
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testSparkReadsTrinoPartitionedTable(StorageFormat storageFormat)
    {
        String baseTableName = "test_spark_reads_trino_partitioned_table_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onTrino().executeQuery(format("CREATE TABLE %s (_string VARCHAR, _bigint BIGINT) WITH (partitioning = ARRAY['_string'], format = '%s')", trinoTableName, storageFormat));
        onTrino().executeQuery(format("INSERT INTO %s VALUES ('a', 1001), ('b', 1002), ('c', 1003)", trinoTableName));
        Row row = row("b", 1002);
        // Filter on the partition column so partition pruning paths are exercised
        String select = "SELECT * FROM %s WHERE _string = 'b'";
        assertThat(onTrino().executeQuery(format(select, trinoTableName)))
                .containsOnly(row);
        assertThat(onSpark().executeQuery(format(select, sparkTableName)))
                .containsOnly(row);
        onTrino().executeQuery("DROP TABLE " + trinoTableName);
    }
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testTrinoReadsSparkPartitionedTable(StorageFormat storageFormat)
    {
        String baseTableName = "test_trino_reads_spark_partitioned_table_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onSpark().executeQuery(format(
                "CREATE TABLE %s (_string STRING, _bigint BIGINT) USING ICEBERG PARTITIONED BY (_string) TBLPROPERTIES ('write.format.default'='%s')",
                sparkTableName,
                storageFormat));
        onSpark().executeQuery(format("INSERT INTO %s VALUES ('a', 1001), ('b', 1002), ('c', 1003)", sparkTableName));
        Row row = row("b", 1002);
        // Filter on the partition column so partition pruning paths are exercised
        String select = "SELECT * FROM %s WHERE _string = 'b'";
        assertThat(onSpark().executeQuery(format(select, sparkTableName)))
                .containsOnly(row);
        assertThat(onTrino().executeQuery(format(select, trinoTableName)))
                .containsOnly(row);
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // Spark writes MAP/ARRAY/STRUCT values; Trino must be able to dereference them.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testTrinoReadingCompositeSparkData(StorageFormat storageFormat)
    {
        String baseTableName = "test_trino_reading_spark_composites_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onSpark().executeQuery(format("" +
                "CREATE TABLE %s (" +
                "  doc_id string,\n" +
                "  info MAP<STRING, INT>,\n" +
                "  pets ARRAY<STRING>,\n" +
                "  user_info STRUCT<name:STRING, surname:STRING, age:INT, gender:STRING>)" +
                "  USING ICEBERG" +
                " TBLPROPERTIES ('write.format.default'='%s')",
                sparkTableName, storageFormat));
        onSpark().executeQuery(format(
                "INSERT INTO TABLE %s SELECT 'Doc213', map('age', 28, 'children', 3), array('Dog', 'Cat', 'Pig'), \n" +
                        "named_struct('name', 'Santa', 'surname', 'Claus','age', 1000,'gender', 'MALE')",
                sparkTableName));
        // NOTE: Trino array indexing is 1-based, hence pets[2] selects the second element
        assertThat(onTrino().executeQuery("SELECT doc_id, info['age'], pets[2], user_info.surname FROM " + trinoTableName))
                .containsOnly(row("Doc213", 28, "Cat", "Claus"));
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // Trino writes MAP/ARRAY/ROW values; Spark must be able to dereference them.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testSparkReadingCompositeTrinoData(StorageFormat storageFormat)
    {
        String baseTableName = "test_spark_reading_trino_composites_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onTrino().executeQuery(format(
                "CREATE TABLE %s (" +
                        "  doc_id VARCHAR,\n" +
                        "  info MAP(VARCHAR, INTEGER),\n" +
                        "  pets ARRAY(VARCHAR),\n" +
                        "  user_info ROW(name VARCHAR, surname VARCHAR, age INTEGER, gender VARCHAR)) " +
                        "  WITH (format = '%s')",
                trinoTableName,
                storageFormat));
        onTrino().executeQuery(format(
                "INSERT INTO %s VALUES('Doc213', MAP(ARRAY['age', 'children'], ARRAY[28, 3]), ARRAY['Dog', 'Cat', 'Pig'], ROW('Santa', 'Claus', 1000, 'MALE'))",
                trinoTableName));
        // NOTE: Spark array indexing is 0-based, hence pets[1] selects the second element
        assertThat(onSpark().executeQuery("SELECT doc_id, info['age'], pets[1], user_info.surname FROM " + sparkTableName))
                .containsOnly(row("Doc213", 28, "Cat", "Claus"));
        onTrino().executeQuery("DROP TABLE " + trinoTableName);
    }
    // Deeply nested MAP<ARRAY<STRUCT>> combinations written by Spark, read by both engines.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testTrinoReadingNestedSparkData(StorageFormat storageFormat)
    {
        String baseTableName = "test_trino_reading_nested_spark_data_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onSpark().executeQuery(format(
                "CREATE TABLE %s (\n" +
                        "  doc_id STRING\n" +
                        ", nested_map MAP<STRING, ARRAY<STRUCT<sname: STRING, snumber: INT>>>\n" +
                        ", nested_array ARRAY<MAP<STRING, ARRAY<STRUCT<mname: STRING, mnumber: INT>>>>\n" +
                        ", nested_struct STRUCT<name:STRING, complicated: ARRAY<MAP<STRING, ARRAY<STRUCT<mname: STRING, mnumber: INT>>>>>)\n" +
                        " USING ICEBERG TBLPROPERTIES ('write.format.default'='%s')",
                sparkTableName,
                storageFormat));
        onSpark().executeQuery(format(
                "INSERT INTO TABLE %s SELECT" +
                        "  'Doc213'" +
                        ", map('s1', array(named_struct('sname', 'ASName1', 'snumber', 201), named_struct('sname', 'ASName2', 'snumber', 202)))" +
                        ", array(map('m1', array(named_struct('mname', 'MAS1Name1', 'mnumber', 301), named_struct('mname', 'MAS1Name2', 'mnumber', 302)))" +
                        "       ,map('m2', array(named_struct('mname', 'MAS2Name1', 'mnumber', 401), named_struct('mname', 'MAS2Name2', 'mnumber', 402))))" +
                        ", named_struct('name', 'S1'," +
                        "      'complicated', array(map('m1', array(named_struct('mname', 'SAMA1Name1', 'mnumber', 301), named_struct('mname', 'SAMA1Name2', 'mnumber', 302)))" +
                        "                          ,map('m2', array(named_struct('mname', 'SAMA2Name1', 'mnumber', 401), named_struct('mname', 'SAMA2Name2', 'mnumber', 402)))))",
                sparkTableName));
        Row row = row("Doc213", "ASName2", 201, "MAS2Name1", 302, "SAMA1Name1", 402);
        // The Spark (0-based) and Trino (1-based) queries below select the same logical
        // elements; only the subscripts differ by one.
        assertThat(onSpark().executeQuery(
                "SELECT" +
                        "  doc_id" +
                        ", nested_map['s1'][1].sname" +
                        ", nested_map['s1'][0].snumber" +
                        ", nested_array[1]['m2'][0].mname" +
                        ", nested_array[0]['m1'][1].mnumber" +
                        ", nested_struct.complicated[0]['m1'][0].mname" +
                        ", nested_struct.complicated[1]['m2'][1].mnumber" +
                        "  FROM " + sparkTableName))
                .containsOnly(row);
        assertThat(onTrino().executeQuery("SELECT" +
                "  doc_id" +
                ", nested_map['s1'][2].sname" +
                ", nested_map['s1'][1].snumber" +
                ", nested_array[2]['m2'][1].mname" +
                ", nested_array[1]['m1'][2].mnumber" +
                ", nested_struct.complicated[1]['m1'][1].mname" +
                ", nested_struct.complicated[2]['m2'][2].mnumber" +
                "  FROM " + trinoTableName))
                .containsOnly(row);
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // Deeply nested MAP(ARRAY(ROW)) combinations written by Trino, read by both engines.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testSparkReadingNestedTrinoData(StorageFormat storageFormat)
    {
        String baseTableName = "test_spark_reading_nested_trino_data_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onTrino().executeQuery(format(
                "CREATE TABLE %s (\n" +
                        "  doc_id VARCHAR\n" +
                        ", nested_map MAP(VARCHAR, ARRAY(ROW(sname VARCHAR, snumber INT)))\n" +
                        ", nested_array ARRAY(MAP(VARCHAR, ARRAY(ROW(mname VARCHAR, mnumber INT))))\n" +
                        ", nested_struct ROW(name VARCHAR, complicated ARRAY(MAP(VARCHAR, ARRAY(ROW(mname VARCHAR, mnumber INT))))))" +
                        "  WITH (format = '%s')",
                trinoTableName,
                storageFormat));
        onTrino().executeQuery(format(
                "INSERT INTO %s SELECT" +
                        "  'Doc213'" +
                        ", map(array['s1'], array[array[row('ASName1', 201), row('ASName2', 202)]])" +
                        ", array[map(array['m1'], array[array[row('MAS1Name1', 301), row('MAS1Name2', 302)]])" +
                        "       ,map(array['m2'], array[array[row('MAS2Name1', 401), row('MAS2Name2', 402)]])]" +
                        ", row('S1'" +
                        "      ,array[map(array['m1'], array[array[row('SAMA1Name1', 301), row('SAMA1Name2', 302)]])" +
                        "            ,map(array['m2'], array[array[row('SAMA2Name1', 401), row('SAMA2Name2', 402)]])])",
                trinoTableName));
        Row row = row("Doc213", "ASName2", 201, "MAS2Name1", 302, "SAMA1Name1", 402);
        // The Trino (1-based) and Spark (0-based) queries below select the same logical
        // elements; only the subscripts differ by one.
        assertThat(onTrino().executeQuery(
                "SELECT" +
                        "  doc_id" +
                        ", nested_map['s1'][2].sname" +
                        ", nested_map['s1'][1].snumber" +
                        ", nested_array[2]['m2'][1].mname" +
                        ", nested_array[1]['m1'][2].mnumber" +
                        ", nested_struct.complicated[1]['m1'][1].mname" +
                        ", nested_struct.complicated[2]['m2'][2].mnumber" +
                        "  FROM " + trinoTableName))
                .containsOnly(row);
        QueryResult sparkResult = onSpark().executeQuery(
                "SELECT" +
                        "  doc_id" +
                        ", nested_map['s1'][1].sname" +
                        ", nested_map['s1'][0].snumber" +
                        ", nested_array[1]['m2'][0].mname" +
                        ", nested_array[0]['m1'][1].mnumber" +
                        ", nested_struct.complicated[0]['m1'][0].mname" +
                        ", nested_struct.complicated[1]['m2'][1].mnumber" +
                        " FROM " + sparkTableName);
        assertThat(sparkResult).containsOnly(row);
        onTrino().executeQuery("DROP TABLE " + trinoTableName);
    }
    // Schema evolution on nested struct fields (rename/drop/re-add) must be resolved by
    // field id, not by name or position, when Trino reads the table back.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testIdBasedFieldMapping(StorageFormat storageFormat)
    {
        String baseTableName = "test_schema_evolution_for_nested_fields_" + storageFormat;
        String trinoTableName = trinoTableName(baseTableName);
        String sparkTableName = sparkTableName(baseTableName);
        onSpark().executeQuery(format(
                "CREATE TABLE %s (_struct STRUCT<rename:BIGINT, keep:BIGINT, drop_and_add:BIGINT, CaseSensitive:BIGINT>, _partition BIGINT)"
                        + " USING ICEBERG"
                        + " partitioned by (_partition)"
                        + " TBLPROPERTIES ('write.format.default' = '%s')",
                sparkTableName,
                storageFormat));
        onSpark().executeQuery(format(
                "INSERT INTO TABLE %s SELECT "
                        + "named_struct('rename', 1, 'keep', 2, 'drop_and_add', 3, 'CaseSensitive', 4), "
                        + "1001",
                sparkTableName));
        // Alter nested fields using Spark. Trino does not support this yet.
        onSpark().executeQuery(format("ALTER TABLE %s RENAME COLUMN _struct.rename TO renamed", sparkTableName));
        onSpark().executeQuery(format("ALTER TABLE %s DROP COLUMN _struct.drop_and_add", sparkTableName));
        onSpark().executeQuery(format("ALTER TABLE %s ADD COLUMN _struct.drop_and_add BIGINT", sparkTableName));
        if (storageFormat == StorageFormat.PARQUET) {
            // TODO (https://github.com/trinodb/trino/issues/8750) the results should be the same for all storage formats
            // TODO support Row (JAVA_OBJECT) in Tempto and switch to QueryAssert
            Assertions.assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName)).rows())
                    .containsOnly(List.of(
                            rowBuilder()
                                    // Rename does not change id
                                    .addField("renamed", null)
                                    .addField("keep", 2L)
                                    .addField("CaseSensitive", 4L)
                                    // Dropping and re-adding changes id
                                    .addField("drop_and_add", 3L)
                                    .build(),
                            1001L));
        }
        else {
            // TODO support Row (JAVA_OBJECT) in Tempto and switch to QueryAssert
            Assertions.assertThat(onTrino().executeQuery(format("SELECT * FROM %s", trinoTableName)).rows())
                    .containsOnly(List.of(
                            rowBuilder()
                                    // Rename does not change id
                                    .addField("renamed", 1L)
                                    .addField("keep", 2L)
                                    .addField("CaseSensitive", 4L)
                                    // Dropping and re-adding changes id
                                    .addField("drop_and_add", null)
                                    .build(),
                            1001L));
        }
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // SHOW TABLES in Trino must list tables created by either engine.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS})
    public void testTrinoShowingSparkCreatedTables()
    {
        String sparkTable = "test_table_listing_for_spark";
        String trinoTable = "test_table_listing_for_trino";
        onSpark().executeQuery(format("CREATE TABLE %s (_integer INTEGER ) USING ICEBERG", sparkTableName(sparkTable)));
        onTrino().executeQuery(format("CREATE TABLE %s (_integer INTEGER )", trinoTableName(trinoTable)));
        assertThat(onTrino().executeQuery(format("SHOW TABLES FROM %s LIKE '%s'", TEST_SCHEMA_NAME, "test_table_listing_for_%")))
                .containsOnly(row(sparkTable), row(trinoTable));
        onSpark().executeQuery("DROP TABLE " + sparkTableName(sparkTable));
        onTrino().executeQuery("DROP TABLE " + trinoTableName(trinoTable));
    }
    // Trino inserts must honor Iceberg's ObjectStorageLocationProvider configured by Spark,
    // i.e. write data files under the configured object-storage path.
    @Test(groups = {ICEBERG, PROFILE_SPECIFIC_TESTS}, dataProvider = "storageFormats")
    public void testTrinoWritingDataWithObjectStorageLocationProvider(StorageFormat storageFormat)
    {
        String baseTableName = "test_object_storage_location_provider_" + storageFormat;
        String sparkTableName = sparkTableName(baseTableName);
        String trinoTableName = trinoTableName(baseTableName);
        String dataPath = "hdfs://hadoop-master:9000/user/hive/warehouse/test_object_storage_location_provider/obj-data";
        onSpark().executeQuery(format("CREATE TABLE %s (_string STRING, _bigint BIGINT) USING ICEBERG TBLPROPERTIES (" +
                "'write.object-storage.enabled'=true," +
                "'write.object-storage.path'='%s'," +
                "'write.format.default' = '%s')",
                sparkTableName, dataPath, storageFormat));
        onTrino().executeQuery(format("INSERT INTO %s VALUES ('a_string', 1000000000000000)", trinoTableName));
        Row result = row("a_string", 1000000000000000L);
        assertThat(onSpark().executeQuery(format("SELECT _string, _bigint FROM %s", sparkTableName))).containsOnly(result);
        assertThat(onTrino().executeQuery(format("SELECT _string, _bigint FROM %s", trinoTableName))).containsOnly(result);
        // Inspect the $files metadata table to verify where the data file actually landed
        QueryResult queryResult = onTrino().executeQuery(format("SELECT file_path FROM %s", trinoTableName("\"" + baseTableName + "$files\"")));
        assertThat(queryResult).hasRowsCount(1).hasColumnsCount(1);
        assertTrue(((String) queryResult.row(0).get(0)).contains(dataPath));
        // TODO: support path override in Iceberg table creation: https://github.com/trinodb/trino/issues/8861
        assertQueryFailure(() -> onTrino().executeQuery("DROP TABLE " + trinoTableName))
                .hasMessageContaining("contains Iceberg path override properties and cannot be dropped from Trino");
        onSpark().executeQuery("DROP TABLE " + sparkTableName);
    }
    // Fully-qualified name of the table as seen from Spark.
    private static String sparkTableName(String tableName)
    {
        return format("%s.%s.%s", SPARK_CATALOG, TEST_SCHEMA_NAME, tableName);
    }
    // Fully-qualified name of the table as seen from Trino.
    private static String trinoTableName(String tableName)
    {
        return format("%s.%s.%s", TRINO_CATALOG, TEST_SCHEMA_NAME, tableName);
    }
    private io.trino.jdbc.Row.Builder rowBuilder()
    {
        return io.trino.jdbc.Row.builder();
    }
    // Storage formats Trino can both read and write.
    @DataProvider
    public static Object[][] storageFormats()
    {
        return Stream.of(StorageFormat.values())
                .filter(StorageFormat::isSupportedInTrino)
                .map(storageFormat -> new Object[] {storageFormat})
                .toArray(Object[][]::new);
    }
    // Storage formats Trino cannot use yet (currently AVRO only).
    @DataProvider
    public static Object[][] unsupportedStorageFormats()
    {
        return Stream.of(StorageFormat.values())
                .filter(storageFormat -> !storageFormat.isSupportedInTrino())
                .map(storageFormat -> new Object[] {storageFormat})
                .toArray(Object[][]::new);
    }
    public enum StorageFormat
    {
        PARQUET,
        ORC,
        AVRO,
        /**/;
        public boolean isSupportedInTrino()
        {
            // TODO (https://github.com/trinodb/trino/issues/1324) not supported in Trino yet
            // - remove testTrinoWithUnsupportedFileFormat once all formats are supported
            return this != AVRO;
        }
    }
    public enum CreateMode
    {
        CREATE_TABLE_AND_INSERT,
        CREATE_TABLE_AS_SELECT,
        CREATE_TABLE_WITH_NO_DATA_AND_INSERT,
    }
}
| Add Iceberg tests for special characters in partition values
Based on the Hive TestTablePartitioningWithSpecialChars
| testing/trino-product-tests/src/main/java/io/trino/tests/product/iceberg/TestIcebergSparkCompatibility.java | Add Iceberg tests for special characters in partition values |
|
Java | apache-2.0 | cc13530bf4c98c1f6f8f91ffd2ffca86914f0bf2 | 0 | darranl/directory-server,apache/directory-server,darranl/directory-server,lucastheisen/apache-directory-server,drankye/directory-server,drankye/directory-server,lucastheisen/apache-directory-server,apache/directory-server | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.core.schema.registries.synchronizers;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.naming.NamingException;
import org.apache.directory.server.core.CoreSession;
import org.apache.directory.server.core.OperationManager;
import org.apache.directory.server.core.entry.DefaultServerAttribute;
import org.apache.directory.server.core.entry.ServerAttribute;
import org.apache.directory.server.core.entry.ServerEntry;
import org.apache.directory.server.core.entry.ServerEntryUtils;
import org.apache.directory.server.core.entry.ServerModification;
import org.apache.directory.server.core.interceptor.context.ModifyOperationContext;
import org.apache.directory.server.core.partition.ByPassConstants;
import org.apache.directory.shared.ldap.constants.MetaSchemaConstants;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.entry.EntryAttribute;
import org.apache.directory.shared.ldap.entry.Modification;
import org.apache.directory.shared.ldap.entry.ModificationOperation;
import org.apache.directory.shared.ldap.entry.Value;
import org.apache.directory.shared.ldap.exception.LdapInvalidNameException;
import org.apache.directory.shared.ldap.exception.LdapOperationNotSupportedException;
import org.apache.directory.shared.ldap.message.ResultCodeEnum;
import org.apache.directory.shared.ldap.name.LdapDN;
import org.apache.directory.shared.ldap.name.Rdn;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.directory.shared.ldap.schema.SchemaManager;
import org.apache.directory.shared.ldap.schema.SchemaObject;
import org.apache.directory.shared.ldap.schema.SchemaObjectType;
import org.apache.directory.shared.ldap.schema.SchemaObjectWrapper;
import org.apache.directory.shared.ldap.schema.registries.AttributeTypeRegistry;
import org.apache.directory.shared.ldap.schema.registries.DefaultSchemaObjectRegistry;
import org.apache.directory.shared.ldap.schema.registries.Registries;
import org.apache.directory.shared.ldap.schema.registries.Schema;
import org.apache.directory.shared.ldap.util.DateUtils;
import org.apache.directory.shared.ldap.util.StringTools;
import org.apache.directory.shared.schema.loader.ldif.SchemaEntityFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class handle modifications made on a global schema. Modifications made
 * on SchemaObjects are handled by the specific schemaObject synchronizers.
*
* @TODO poorly implemented - revisit the SchemaChangeHandler for this puppy
* and do it right.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
* @version $Rev$, $Date$
*/
public class SchemaSynchronizer implements RegistrySynchronizer
{
    /** A logger for this class */
    private static final Logger LOG = LoggerFactory.getLogger( SchemaSynchronizer.class );
    /** The factory used to build Schema instances from metaSchema entries */
    private final SchemaEntityFactory factory;
    //private final PartitionSchemaLoader loader;
    /** The server SchemaManager */
    private final SchemaManager schemaManager;
    /** The global registries */
    private final Registries registries;
    /** The m-disable AttributeType */
    private final AttributeType disabledAT;
    /** The CN attributeType */
    private final AttributeType cnAT;
    /** The m-dependencies AttributeType */
    private final AttributeType dependenciesAT;
    /** The modifiersName AttributeType */
    private final AttributeType modifiersNameAT;
    /** The modifyTimestamp AttributeType */
    private final AttributeType modifyTimestampAT;
    /** A static DN referencing ou=schema */
    private final LdapDN ouSchemaDN;
    /**
     * Creates and initializes a new instance of Schema synchronizer
     *
     * @param schemaManager The server SchemaManager, giving access to the global
     * Registries and to the AttributeTypes this synchronizer needs
     * @throws Exception If one of the required AttributeTypes cannot be looked up,
     * or if the ou=schema DN cannot be normalized
     */
    public SchemaSynchronizer( SchemaManager schemaManager ) throws Exception
    {
        this.registries = schemaManager.getRegistries();
        this.schemaManager = schemaManager;
        disabledAT = registries.getAttributeTypeRegistry().lookup( MetaSchemaConstants.M_DISABLED_AT );
        factory = new SchemaEntityFactory();
        cnAT = registries.getAttributeTypeRegistry().lookup( SchemaConstants.CN_AT );
        dependenciesAT = registries.getAttributeTypeRegistry()
            .lookup( MetaSchemaConstants.M_DEPENDENCIES_AT );
        modifiersNameAT = registries.getAttributeTypeRegistry().lookup( SchemaConstants.MODIFIERS_NAME_AT );
        modifyTimestampAT = registries.getAttributeTypeRegistry().lookup( SchemaConstants.MODIFY_TIMESTAMP_AT );
        ouSchemaDN = new LdapDN( SchemaConstants.OU_SCHEMA );
        ouSchemaDN.normalize( registries.getAttributeTypeRegistry().getNormalizerMapping() );
    }
    /**
     * The only modification done on a schema element is on the m-disabled
     * attributeType
     *
     * Depending on the existence of this attribute in the previous entry, we will
     * have to update the entry or not.
     *
     * @param opContext the modification context, carrying the entry and the modifications
     * @param targetEntry the resulting entry after the modifications are applied
     * @param cascade true if the modification cascades to dependent elements
     * @return true if the schema was effectively enabled or disabled, false otherwise
     * @throws Exception if the enable/disable processing fails
     */
    public boolean modify( ModifyOperationContext opContext, ServerEntry targetEntry, boolean cascade ) throws Exception
    {
        ServerEntry entry = opContext.getEntry();
        List<Modification> mods = opContext.getModItems();
        boolean hasModification = SCHEMA_UNCHANGED;
        // Check if the entry has a m-disabled attribute
        EntryAttribute disabledInEntry = entry.get( disabledAT );
        Modification disabledModification = ServerEntryUtils.getModificationItem( mods, disabledAT );
        // The attribute might be present, but that does not mean we will change it.
        // If it's absent, and if we have it in the previous entry, that mean we want
        // to enable the schema
        if ( disabledModification != null )
        {
            // We are trying to modify the m-disabled attribute.
            ModificationOperation modification = disabledModification.getOperation();
            ServerAttribute attribute = (ServerAttribute)disabledModification.getAttribute();
            hasModification = modifyDisable( opContext, modification, attribute, disabledInEntry );
        }
        else if ( disabledInEntry != null )
        {
            // No modification of m-disabled was requested, but the entry currently
            // carries it: treat this as a request to remove it (i.e. enable the schema)
            hasModification = modifyDisable( opContext, ModificationOperation.REMOVE_ATTRIBUTE, null, disabledInEntry );
        }
        return hasModification;
    }
    /**
     * Move-and-rename of a metaSchema entry: intentionally a no-op in this
     * synchronizer (the method body is empty).
     */
    public void moveAndRename( LdapDN oriChildName, LdapDN newParentName, Rdn newRn, boolean deleteOldRn, ServerEntry entry, boolean cascaded ) throws NamingException
    {
    }
    /**
     * Handles the addition of a metaSchema object to the schema partition.
     *
     * @param entry the entry of the new metaSchema object (its DN must sit
     * directly under ou=schema)
     * @throws Exception if the parent DN is not ou=schema, if a dependency check
     * fails, or if an enabled schema cannot be registered
     */
    public void add( ServerEntry entry ) throws Exception
    {
        LdapDN dn = entry.getDn();
        LdapDN parentDn = ( LdapDN ) dn.clone();
        parentDn.remove( parentDn.size() - 1 );
        parentDn.normalize( registries.getAttributeTypeRegistry().getNormalizerMapping() );
        if ( !parentDn.equals( ouSchemaDN ) )
        {
            throw new LdapInvalidNameException( "The parent dn of a schema should be " + ouSchemaDN.getUpName() + " and not: "
                + parentDn.toNormName(), ResultCodeEnum.NAMING_VIOLATION );
        }
        // check if the new schema is enabled or disabled
        boolean isEnabled = false;
        EntryAttribute disabled = entry.get( disabledAT );
        if ( disabled == null )
        {
            // If the attribute is absent, then the schema is enabled by default
            isEnabled = true;
        }
        else if ( ! disabled.contains( "TRUE" ) )
        {
            // m-disabled is present but not set to TRUE: the schema is enabled
            isEnabled = true;
        }
        // check to see that all dependencies are resolved and loaded if this
        // schema is enabled, otherwise check that the dependency schemas exist
        checkForDependencies( isEnabled, entry );
        /*
         * There's a slight problem that may result when adding a metaSchema
         * object if the addition of the physical entry fails.  If the schema
         * is enabled when added in the condition tested below, that schema
         * is added to the global registries.  We need to add this so subsequent
         * schema entity additions are loaded into the registries as they are
         * added to the schema partition.  However if the metaSchema object
         * addition fails then we're left with this schema object looking like
         * it is enabled in the registries object's schema hash.  The effects
         * of this are unpredictable.
         *
         * This whole problem is due to the inability of these handlers to
         * react to a failed operation.  To fix this we would need some way
         * for these handlers to respond to failed operations and revert their
         * effects on the registries.
         *
         * TODO: might want to add a set of failedOnXXX methods to the adapter
         * where on failure the schema service calls the schema manager and it
         * calls the appropriate methods on the respective handler.  This way
         * the schema manager can rollback registry changes when LDAP operations
         * fail.
         */
        if ( isEnabled )
        {
            Schema schema = factory.getSchema( entry );
            registries.schemaLoaded( schema );
        }
    }
/**
 * Called to react to the deletion of a metaSchema object. This method is
 * intended to remove the schema from the loaded schema map of the global
 * registries.
 *
 * NOTE(review): the dependents check and the registry unload are commented
 * out below, so this method currently only resolves the schema name and
 * performs no action -- confirm whether that is intentional.
 *
 * @param entry the entry of the metaSchema object being deleted
 * @param cascade whether the deletion cascades (currently unused)
 */
public void delete( ServerEntry entry, boolean cascade ) throws Exception
{
    EntryAttribute cn = entry.get( cnAT );
    String schemaName = cn.getString();

    // Before allowing a schema object to be deleted we must check
    // to make sure it's not depended upon by another schema
    /*Set<String> dependents = schemaManager.listDependentSchemaNames( schemaName );

    if ( ! dependents.isEmpty() )
    {
        String msg = "Cannot delete schema that has dependents: " + dependents;
        LOG.warn( msg );
        throw new LdapOperationNotSupportedException(
            msg,
            ResultCodeEnum.UNWILLING_TO_PERFORM );
    }

    // no need to check if schema is enabled or disabled here
    // if not in the loaded set there will be no negative effect
    registries.schemaUnloaded( schemaManager.getSchema( schemaName ) );
    */
}
/**
 * Responds to the rdn (commonName) of the metaSchema object being changed.
 * Only validates that the new rdn attribute is cn; the actual propagation of
 * the new schema name to the registries (steps [1]-[3] described below) is
 * currently commented out -- confirm whether that is intentional.
 *
 * @param entry the entry of the metaSchema object before the rename
 * @param newRdn the new commonName rdn of the metaSchema object
 * @param cascade whether the rename cascades (currently unused)
 * @throws LdapOperationNotSupportedException if the new rdn attribute is not cn
 */
public void rename( ServerEntry entry, Rdn newRdn, boolean cascade ) throws Exception
{
    String rdnAttribute = newRdn.getUpType();
    String rdnAttributeOid = registries.getAttributeTypeRegistry().getOidByName( rdnAttribute );

    // Schema entries must be named by cn: resolve the rdn attribute to its OID
    // and compare against the cn attributeType's OID.
    if ( ! rdnAttributeOid.equals( cnAT.getOid() ) )
    {
        throw new LdapOperationNotSupportedException(
            "Cannot allow rename with rdnAttribute set to "
            + rdnAttribute + ": cn must be used instead." ,
            ResultCodeEnum.UNWILLING_TO_PERFORM );
    }

    /*
     * This operation has to do the following:
     *
     * [1] check and make sure there are no dependent schemas on the
     * one being renamed - if so an exception should result
     *
     * [2] make non-schema object registries modify the mapping
     * for their entities: non-schema object registries contain
     * objects that are not SchemaObjects and hence do not carry
     * their schema within the object as a property
     *
     * [3] make schema object registries do the same but the way
     * they do them will be different since these objects will
     * need to be replaced or will require a setter for the
     * schema name
     */

    // step [1]
    /*
    String schemaName = getSchemaName( entry.getDn() );
    Set<String> dependents = schemaManager.listDependentSchemaNames( schemaName );

    if ( ! dependents.isEmpty() )
    {
        throw new LdapOperationNotSupportedException(
            "Cannot allow a rename on " + schemaName + " schema while it has depentents.",
            ResultCodeEnum.UNWILLING_TO_PERFORM );
    }

    // check if the new schema is enabled or disabled
    boolean isEnabled = false;
    EntryAttribute disabled = entry.get( disabledAT );

    if ( disabled == null )
    {
        isEnabled = true;
    }
    else if ( ! disabled.get().equals( "TRUE" ) )
    {
        isEnabled = true;
    }

    if ( ! isEnabled )
    {
        return;
    }

    // do steps 2 and 3 if the schema has been enabled and is loaded

    // step [2]
    String newSchemaName = ( String ) newRdn.getUpValue();
    registries.getComparatorRegistry().renameSchema( schemaName, newSchemaName );
    registries.getNormalizerRegistry().renameSchema( schemaName, newSchemaName );
    registries.getSyntaxCheckerRegistry().renameSchema( schemaName, newSchemaName );

    // step [3]
    renameSchema( registries.getAttributeTypeRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getDitContentRuleRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getDitStructureRuleRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getMatchingRuleRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getMatchingRuleUseRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getNameFormRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getObjectClassRegistry(), schemaName, newSchemaName );
    renameSchema( registries.getLdapSyntaxRegistry(), schemaName, newSchemaName );
    */
}
/**
 * Rejects any move-and-rename of a metaSchema object: schema entries are
 * pinned directly under ou=schema and may never be relocated.
 *
 * @throws LdapOperationNotSupportedException always, with UNWILLING_TO_PERFORM
 */
public void moveAndRename( LdapDN oriChildName, LdapDN newParentName, String newRn, boolean deleteOldRn,
    ServerEntry entry, boolean cascade ) throws NamingException
{
    // Unconditional rejection: relocating schema entries is never supported.
    throw new LdapOperationNotSupportedException( "Moving around schemas is not allowed.", ResultCodeEnum.UNWILLING_TO_PERFORM );
}
/**
 * Rejects any move of a metaSchema object: schema entries are pinned
 * directly under ou=schema and may never be relocated.
 *
 * @throws LdapOperationNotSupportedException always, with UNWILLING_TO_PERFORM
 */
public void move( LdapDN oriChildName, LdapDN newParentName,
    ServerEntry entry, boolean cascade ) throws NamingException
{
    // Unconditional rejection: relocating schema entries is never supported.
    throw new LdapOperationNotSupportedException( "Moving around schemas is not allowed.", ResultCodeEnum.UNWILLING_TO_PERFORM );
}
// -----------------------------------------------------------------------
// private utility methods
// -----------------------------------------------------------------------
/**
 * Applies a modification of the m-disabled flag to the target schema.
 *
 * The flag can be ADDed, REMOVEd or REPLACEd. The effect depends on both the
 * operation and the flag's current state in the entry:
 *
 * <pre>
 *                 +-------------------+--------------------+--------------------+
 *  op/state       |       TRUE        |       FALSE        |       ABSENT       |
 * +-------+-------+----------------------------------------+--------------------+
 * | ADD   | TRUE  | do nothing        | do nothing         | disable the schema |
 * |       +-------+-------------------+--------------------+--------------------+
 * |       | FALSE | do nothing        | do nothing         | do nothing         |
 * +-------+-------+-------------------+--------------------+--------------------+
 * |REMOVE | N/A   | enable the schema | do nothing         | do nothing         |
 * +-------+-------+-------------------+--------------------+--------------------+
 * |MODIFY | TRUE  | do nothing        | disable the schema | disable the schema |
 * |       +-------+-------------------+--------------------+--------------------+
 * |       | FALSE | enable the schema | do nothing         | do nothing         |
 * +-------+-------+-------------------+--------------------+--------------------+
 * </pre>
 *
 * @param opContext the modify operation context (supplies DN and session)
 * @param modOp the modification operation being applied to m-disabled
 * @param disabledInMods the m-disabled attribute carried by the modification, may be null
 * @param disabledInEntry the m-disabled attribute currently in the entry, may be null
 * @return true if the schema's enabled/disabled state actually changed
 * @throws IllegalArgumentException if modOp is not ADD, REMOVE or REPLACE
 */
private boolean modifyDisable( ModifyOperationContext opContext, ModificationOperation modOp,
    EntryAttribute disabledInMods, EntryAttribute disabledInEntry ) throws Exception
{
    LdapDN dn = opContext.getDn();

    if ( modOp == ModificationOperation.ADD_ATTRIBUTE )
    {
        // Adding m-disabled=TRUE to an entry that does not carry the flag yet
        // disables the schema; anything else leaves it untouched.
        if ( ( disabledInEntry == null ) && "TRUE".equalsIgnoreCase( disabledInMods.getString() ) )
        {
            return disableSchema( opContext.getSession(), getSchemaName( dn ) );
        }
    }
    else if ( modOp == ModificationOperation.REMOVE_ATTRIBUTE )
    {
        // Removing m-disabled from a currently disabled schema re-enables it.
        if ( ( disabledInEntry != null ) && "TRUE".equalsIgnoreCase( disabledInEntry.getString() ) )
        {
            return enableSchema( getSchemaName( dn ) );
        }
    }
    else if ( modOp == ModificationOperation.REPLACE_ATTRIBUTE )
    {
        // Compare current vs requested state and flip the schema only on a
        // real transition.
        boolean wasDisabled = ( disabledInEntry != null )
            && "TRUE".equalsIgnoreCase( disabledInEntry.getString() );

        boolean willBeDisabled = ( disabledInMods != null )
            && "TRUE".equalsIgnoreCase( disabledInMods.getString() );

        if ( wasDisabled && !willBeDisabled )
        {
            return enableSchema( getSchemaName( dn ) );
        }

        if ( !wasDisabled && willBeDisabled )
        {
            return disableSchema( opContext.getSession(), getSchemaName( dn ) );
        }
    }
    else
    {
        throw new IllegalArgumentException( "Unknown modify operation type: " + modOp );
    }

    return SCHEMA_UNCHANGED;
}
/**
 * Extracts the schema name from the rdn value of a schema entry's DN
 * (i.e. the &lt;name&gt; of cn=&lt;name&gt;,ou=schema).
 */
private String getSchemaName( LdapDN schema )
{
    Rdn rdn = schema.getRdn();
    return ( String ) rdn.getValue();
}
/**
 * Builds the DN under which the schemaObjects of the given type live for a
 * specific schema, i.e. ou=&lt;type&gt;,cn=&lt;schemaName&gt;,ou=schema.
 *
 * @param schemaObjectType the type of schemaObject, supplying the leaf rdn
 * @param schemaName the name of the owning schema
 * @return the DN of the schemaObject container
 */
private LdapDN buildDn( SchemaObjectType schemaObjectType, String schemaName ) throws NamingException
{
    return new LdapDN( SchemaConstants.OU_SCHEMA, "cn=" + schemaName, schemaObjectType.getRdn() );
}
/**
 * Disable a schemaObject's entry: replaces its m-disabled, modifiersName and
 * modifyTimestamp attributes via a modify operation that bypasses all
 * interceptors, then iterates over the owning schema's content.
 *
 * NOTE(review): the m-disabled value written here is "FALSE", which for a
 * method named disable() looks inverted (elsewhere m-disabled=TRUE means
 * disabled) -- confirm intent before relying on this method.
 *
 * @param schemaObject the schemaObject whose entry is modified
 * @param session the session used to perform the modification and to supply
 *        the effective principal for modifiersName
 * @param registries the registries used to look up the loaded schema
 */
private void disable( SchemaObject schemaObject, CoreSession session, Registries registries )
    throws Exception
{
    Schema schema = registries.getLoadedSchema( schemaObject.getSchemaName() );
    List<Modification> modifications = new ArrayList<Modification>();

    // The m-disabled AT (see NOTE in the javadoc about the "FALSE" value)
    EntryAttribute disabledAttr = new DefaultServerAttribute( disabledAT, "FALSE" );
    Modification disabledMod = new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, disabledAttr );
    modifications.add( disabledMod );

    // The modifiersName AT: record who performed the change
    EntryAttribute modifiersNameAttr =
        new DefaultServerAttribute( modifiersNameAT, session.getEffectivePrincipal().getName() );
    Modification modifiersNameMod = new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, modifiersNameAttr );
    modifications.add( modifiersNameMod );

    // The modifyTimestamp AT: record when the change happened
    EntryAttribute modifyTimestampAttr =
        new DefaultServerAttribute( modifyTimestampAT, DateUtils.getGeneralizedTime() );
    Modification modifyTimestampMod = new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, modifyTimestampAttr );
    modifications.add( modifyTimestampMod );

    // Call the modify operation directly on the OperationManager, bypassing
    // every interceptor (schema changes must not be re-intercepted)
    LdapDN dn = buildDn( schemaObject.getObjectType(), schemaObject.getName() );

    ModifyOperationContext modifyContext = new ModifyOperationContext( session, dn, modifications );
    modifyContext.setByPassed( ByPassConstants.BYPASS_ALL_COLLECTION );

    OperationManager operationManager =
        session.getDirectoryService().getOperationManager();

    operationManager.modify( modifyContext );

    // Now iterate on all the schemaObject under this schema
    // NOTE(review): loop body is empty -- per-object disabling is not implemented yet.
    for ( SchemaObjectWrapper schemaObjectWrapper : schema.getContent() )
    {
    }
}
/**
 * Disables the named schema by delegating to the SchemaManager.
 *
 * @param session the session on whose behalf the schema is disabled (currently
 *        only used by the commented-out brute-force implementation below)
 * @param schemaName the name of the schema to disable
 * @return the SchemaManager's result: whether the disable succeeded
 * @throws LdapOperationNotSupportedException if the schema is not loaded
 */
private boolean disableSchema( CoreSession session, String schemaName ) throws Exception
{
    Schema schema = registries.getLoadedSchema( schemaName );

    if ( schema == null )
    {
        // This is not possible. We can't disable a schema which is not loaded.
        // (fixed: the message previously said "enable" in this disable path)
        String msg = "Unwilling to disable a not loaded schema: " + schemaName;
        LOG.error( msg );
        throw new LdapOperationNotSupportedException( msg, ResultCodeEnum.UNWILLING_TO_PERFORM );
    }

    return schemaManager.disable( schemaName );

    /*
    // First check that the schema is not already disabled
    Map<String, Schema> schemas = registries.getLoadedSchemas();

    Schema schema = schemas.get( schemaName );

    if ( ( schema == null ) || schema.isDisabled() )
    {
        // The schema is disabled, do nothing
        return SCHEMA_UNCHANGED;
    }

    Set<String> dependents = schemaManager.listEnabledDependentSchemaNames( schemaName );

    if ( ! dependents.isEmpty() )
    {
        throw new LdapOperationNotSupportedException(
            "Cannot disable schema with enabled dependents: " + dependents,
            ResultCodeEnum.UNWILLING_TO_PERFORM );
    }

    schema.disable();

    // Use brute force right now : iterate through all the schemaObjects
    // searching for those associated with the disabled schema
    disableAT( session, schemaName );

    Set<SchemaObjectWrapper> content = registries.getLoadedSchema( schemaName ).getContent();

    for ( SchemaObjectWrapper schemaWrapper : content )
    {
        SchemaObject schemaObject = schemaWrapper.get();

        System.out.println( "Disabling " + schemaObject.getName() );
    }

    return SCHEMA_MODIFIED;
    */
}
/**
 * Walks every attributeType of the given schema and builds a REPLACE
 * modification setting m-disabled to TRUE for each one not already disabled.
 *
 * NOTE(review): the built Modification is never applied -- the session.modify
 * call is commented out below, so this method currently has no effect.
 *
 * @param session the session the modifications would be applied under
 * @param schemaName the name of the schema whose attributeTypes are targeted
 */
private void disableAT( CoreSession session, String schemaName )
{
    AttributeTypeRegistry atRegistry = registries.getAttributeTypeRegistry();

    for ( AttributeType attributeType : atRegistry )
    {
        if ( schemaName.equalsIgnoreCase( attributeType.getSchemaName() ) )
        {
            if ( attributeType.isDisabled() )
            {
                // Already disabled: nothing to do for this attributeType
                continue;
            }

            EntryAttribute disable = new DefaultServerAttribute( disabledAT, "TRUE" );
            Modification modification =
                new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, disable );

            //session.modify( dn, mods, ignoreReferral, log )
        }
    }
}
/**
 * Enables the named schema, loading it first (in disabled state) if it is
 * not yet present in the registries, then delegating the actual switch to
 * the SchemaManager.
 *
 * @param schemaName the name of the schema to enable
 * @return the SchemaManager's result: whether the enable succeeded
 */
private boolean enableSchema( String schemaName ) throws Exception
{
    if ( registries.getLoadedSchema( schemaName ) == null )
    {
        // Not loaded yet: bring the schema in (still disabled) before enabling it.
        schemaManager.loadDisabled( schemaName );
    }

    return schemaManager.enable( schemaName );
}
/**
 * Verifies the m-dependencies of a metaSchema entry. For an enabled schema
 * every dependency must itself be loaded; for a disabled schema the
 * dependencies only need to be known to the SchemaManager.
 *
 * @param isEnabled whether the metaSchema being checked is enabled
 * @param entry the metaSchema entry carrying the m-dependencies attribute
 * @throws LdapOperationNotSupportedException if a dependency is missing or,
 *         for an enabled schema, not loaded
 */
private void checkForDependencies( boolean isEnabled, ServerEntry entry ) throws Exception
{
    EntryAttribute dependencies = entry.get( this.dependenciesAT );

    // No m-dependencies attribute: nothing to verify.
    if ( dependencies == null )
    {
        return;
    }

    if ( isEnabled )
    {
        // Enabled schema: every dependency must be present in the loaded set.
        Map<String, Schema> loaded = registries.getLoadedSchemas();

        for ( Value<?> value : dependencies )
        {
            String dependency = value.getString();

            if ( !loaded.containsKey( dependency ) )
            {
                throw new LdapOperationNotSupportedException(
                    "Unwilling to perform operation on enabled schema with disabled or missing dependencies: "
                    + dependency, ResultCodeEnum.UNWILLING_TO_PERFORM );
            }
        }

        return;
    }

    // Disabled schema: the dependencies merely have to exist.
    for ( Value<?> value : dependencies )
    {
        String dependency = value.getString();

        if ( schemaManager.getLoadedSchema( StringTools.toLowerCase( dependency ) ) == null )
        {
            throw new LdapOperationNotSupportedException(
                "Unwilling to perform operation on schema with missing dependencies: " + dependency,
                ResultCodeEnum.UNWILLING_TO_PERFORM );
        }
    }
}
/**
 * Iterates through the SchemaObjects of a DefaultSchemaObjectRegistry and
 * re-labels every object belonging to the original schema with the new
 * schema name (comparison is case-insensitive).
 *
 * @param registry the registry whose objects are relabelled
 * @param originalSchemaName the schema name to match
 * @param newSchemaName the schema name to assign
 */
private void renameSchema( DefaultSchemaObjectRegistry<? extends SchemaObject> registry, String originalSchemaName, String newSchemaName )
{
    for ( Iterator<? extends SchemaObject> it = registry.iterator(); it.hasNext(); )
    {
        SchemaObject schemaObject = it.next();

        if ( schemaObject.getSchemaName().equalsIgnoreCase( originalSchemaName ) )
        {
            schemaObject.setSchemaName( newSchemaName );
        }
    }
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.core.schema.registries.synchronizers;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.naming.NamingException;
import org.apache.directory.server.core.CoreSession;
import org.apache.directory.server.core.OperationManager;
import org.apache.directory.server.core.entry.DefaultServerAttribute;
import org.apache.directory.server.core.entry.ServerAttribute;
import org.apache.directory.server.core.entry.ServerEntry;
import org.apache.directory.server.core.entry.ServerEntryUtils;
import org.apache.directory.server.core.entry.ServerModification;
import org.apache.directory.server.core.interceptor.context.ModifyOperationContext;
import org.apache.directory.server.core.partition.ByPassConstants;
import org.apache.directory.shared.ldap.constants.MetaSchemaConstants;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.entry.EntryAttribute;
import org.apache.directory.shared.ldap.entry.Modification;
import org.apache.directory.shared.ldap.entry.ModificationOperation;
import org.apache.directory.shared.ldap.exception.LdapInvalidNameException;
import org.apache.directory.shared.ldap.exception.LdapOperationNotSupportedException;
import org.apache.directory.shared.ldap.message.ResultCodeEnum;
import org.apache.directory.shared.ldap.name.LdapDN;
import org.apache.directory.shared.ldap.name.Rdn;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.directory.shared.ldap.schema.SchemaManager;
import org.apache.directory.shared.ldap.schema.SchemaObject;
import org.apache.directory.shared.ldap.schema.SchemaObjectType;
import org.apache.directory.shared.ldap.schema.SchemaObjectWrapper;
import org.apache.directory.shared.ldap.schema.registries.AttributeTypeRegistry;
import org.apache.directory.shared.ldap.schema.registries.DefaultSchemaObjectRegistry;
import org.apache.directory.shared.ldap.schema.registries.Registries;
import org.apache.directory.shared.ldap.schema.registries.Schema;
import org.apache.directory.shared.ldap.util.DateUtils;
import org.apache.directory.shared.schema.loader.ldif.SchemaEntityFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class handle modifications made on a global schema. Modifications made
* on SchemaObjects are handled by the specific shcemaObject synchronizers.
*
* @TODO poorly implemented - revisit the SchemaChangeHandler for this puppy
* and do it right.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
* @version $Rev$, $Date$
*/
public class SchemaSynchronizer implements RegistrySynchronizer
{
/** A logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( SchemaSynchronizer.class );
private final SchemaEntityFactory factory;
//private final PartitionSchemaLoader loader;
private final SchemaManager schemaManager;
/** The global registries */
private final Registries registries;
/** The m-disable AttributeType */
private final AttributeType disabledAT;
/** The CN attributeType */
private final AttributeType cnAT;
/** The m-dependencies AttributeType */
private final AttributeType dependenciesAT;
/** The modifiersName AttributeType */
private final AttributeType modifiersNameAT;
/** The modifyTimestamp AttributeType */
private final AttributeType modifyTimestampAT;
/** A static DN referencing ou=schema */
private final LdapDN ouSchemaDN;
/**
 * Creates and initializes a new instance of the schema synchronizer,
 * resolving the attributeTypes (m-disabled, cn, m-dependencies,
 * modifiersName, modifyTimestamp) and the normalized ou=schema DN it
 * needs from the given SchemaManager's registries.
 *
 * @param schemaManager the SchemaManager supplying the registries
 * @throws Exception if an attributeType lookup or DN normalization fails
 */
public SchemaSynchronizer( SchemaManager schemaManager ) throws Exception
{
    this.registries = schemaManager.getRegistries();
    this.schemaManager = schemaManager;
    disabledAT = registries.getAttributeTypeRegistry().lookup( MetaSchemaConstants.M_DISABLED_AT );
    factory = new SchemaEntityFactory();
    cnAT = registries.getAttributeTypeRegistry().lookup( SchemaConstants.CN_AT );
    dependenciesAT = registries.getAttributeTypeRegistry()
        .lookup( MetaSchemaConstants.M_DEPENDENCIES_AT );
    modifiersNameAT = registries.getAttributeTypeRegistry().lookup( SchemaConstants.MODIFIERS_NAME_AT );
    modifyTimestampAT = registries.getAttributeTypeRegistry().lookup( SchemaConstants.MODIFY_TIMESTAMP_AT );

    ouSchemaDN = new LdapDN( SchemaConstants.OU_SCHEMA );
    ouSchemaDN.normalize( registries.getAttributeTypeRegistry().getNormalizerMapping() );
}
/**
 * Handles a modify on a schema element. The only supported modification is
 * on the m-disabled attributeType: depending on whether the attribute is
 * targeted by the mods, or present in the previous entry, the schema is
 * enabled or disabled via modifyDisable().
 *
 * @param opContext the modify operation context (entry, mods, session)
 * @param targetEntry the entry as it would look after the modification (unused here)
 * @param cascade whether the modification cascades (unused here)
 * @return true if the schema's enabled/disabled state actually changed
 */
public boolean modify( ModifyOperationContext opContext, ServerEntry targetEntry, boolean cascade ) throws Exception
{
    ServerEntry entry = opContext.getEntry();
    List<Modification> mods = opContext.getModItems();
    boolean hasModification = SCHEMA_UNCHANGED;

    // Check if the entry has a m-disabled attribute
    EntryAttribute disabledInEntry = entry.get( disabledAT );
    Modification disabledModification = ServerEntryUtils.getModificationItem( mods, disabledAT );

    // The attribute might be present, but that does not mean we will change it.
    // If it's absent, and if we have it in the previous entry, that mean we want
    // to enable the schema
    if ( disabledModification != null )
    {
        // We are trying to modify the m-disabled attribute.
        ModificationOperation modification = disabledModification.getOperation();
        ServerAttribute attribute = (ServerAttribute)disabledModification.getAttribute();

        hasModification = modifyDisable( opContext, modification, attribute, disabledInEntry );
    }
    else if ( disabledInEntry != null )
    {
        // m-disabled is not in the mods but was in the entry: treat as a removal,
        // which re-enables the schema if it was disabled.
        hasModification = modifyDisable( opContext, ModificationOperation.REMOVE_ATTRIBUTE, null, disabledInEntry );
    }

    return hasModification;
}
/**
 * Rename/move handler for metaSchema objects, Rdn-based variant.
 *
 * NOTE(review): this overload is a silent no-op, while the String-based
 * moveAndRename overload rejects the operation with UNWILLING_TO_PERFORM.
 * Confirm whether this one should also throw.
 */
public void moveAndRename( LdapDN oriChildName, LdapDN newParentName, Rdn newRn, boolean deleteOldRn, ServerEntry entry, boolean cascaded ) throws NamingException
{
    // Intentionally(?) left empty -- see NOTE in the javadoc.
}
/**
* Handles the addition of a metaSchema object to the schema partition.
*
* @param name the dn of the new metaSchema object
* @param entry the attributes of the new metaSchema object
*/
public void add( ServerEntry entry ) throws Exception
{
LdapDN dn = entry.getDn();
LdapDN parentDn = ( LdapDN ) dn.clone();
parentDn.remove( parentDn.size() - 1 );
parentDn.normalize( registries.getAttributeTypeRegistry().getNormalizerMapping() );
if ( !parentDn.equals( ouSchemaDN ) )
{
throw new LdapInvalidNameException( "The parent dn of a schema should be " + ouSchemaDN.getUpName() + " and not: "
+ parentDn.toNormName(), ResultCodeEnum.NAMING_VIOLATION );
}
// check if the new schema is enabled or disabled
boolean isEnabled = false;
EntryAttribute disabled = entry.get( disabledAT );
if ( disabled == null )
{
// If the attribute is absent, then the schema is enabled by default
isEnabled = true;
}
else if ( ! disabled.contains( "TRUE" ) )
{
isEnabled = true;
}
// check to see that all dependencies are resolved and loaded if this
// schema is enabled, otherwise check that the dependency schemas exist
checkForDependencies( isEnabled, entry );
/*
* There's a slight problem that may result when adding a metaSchema
* object if the addition of the physical entry fails. If the schema
* is enabled when added in the condition tested below, that schema
* is added to the global registries. We need to add this so subsequent
* schema entity additions are loaded into the registries as they are
* added to the schema partition. However if the metaSchema object
* addition fails then we're left with this schema object looking like
* it is enabled in the registries object's schema hash. The effects
* of this are unpredictable.
*
* This whole problem is due to the inability of these handlers to
* react to a failed operation. To fix this we would need some way
* for these handlers to respond to failed operations and revert their
* effects on the registries.
*
* TODO: might want to add a set of failedOnXXX methods to the adapter
* where on failure the schema service calls the schema manager and it
* calls the appropriate methods on the respective handler. This way
* the schema manager can rollback registry changes when LDAP operations
* fail.
*/
if ( isEnabled )
{
Schema schema = factory.getSchema( entry );
registries.schemaLoaded( schema );
}
}
/**
* Called to react to the deletion of a metaSchema object. This method
* simply removes the schema from the loaded schema map of the global
* registries.
*
* @param name the dn of the metaSchema object being deleted
* @param entry the attributes of the metaSchema object
*/
public void delete( ServerEntry entry, boolean cascade ) throws Exception
{
EntryAttribute cn = entry.get( cnAT );
String schemaName = cn.getString();
// Before allowing a schema object to be deleted we must check
// to make sure it's not depended upon by another schema
/*Set<String> dependents = schemaManager.listDependentSchemaNames( schemaName );
if ( ! dependents.isEmpty() )
{
String msg = "Cannot delete schema that has dependents: " + dependents;
LOG.warn( msg );
throw new LdapOperationNotSupportedException(
msg,
ResultCodeEnum.UNWILLING_TO_PERFORM );
}
// no need to check if schema is enabled or disabled here
// if not in the loaded set there will be no negative effect
registries.schemaUnloaded( schemaManager.getSchema( schemaName ) );
*/
}
/**
* Responds to the rdn (commonName) of the metaSchema object being
* changed. Changes all the schema entities associated with the
* renamed schema so they now map to a new schema name.
*
* @param name the dn of the metaSchema object renamed
* @param entry the entry of the metaSchema object before the rename
* @param newRdn the new commonName of the metaSchema object
*/
public void rename( ServerEntry entry, Rdn newRdn, boolean cascade ) throws Exception
{
String rdnAttribute = newRdn.getUpType();
String rdnAttributeOid = registries.getAttributeTypeRegistry().getOidByName( rdnAttribute );
if ( ! rdnAttributeOid.equals( cnAT.getOid() ) )
{
throw new LdapOperationNotSupportedException(
"Cannot allow rename with rdnAttribute set to "
+ rdnAttribute + ": cn must be used instead." ,
ResultCodeEnum.UNWILLING_TO_PERFORM );
}
/*
* This operation has to do the following:
*
* [1] check and make sure there are no dependent schemas on the
* one being renamed - if so an exception should result
*
* [2] make non-schema object registries modify the mapping
* for their entities: non-schema object registries contain
* objects that are not SchemaObjects and hence do not carry
* their schema within the object as a property
*
* [3] make schema object registries do the same but the way
* they do them will be different since these objects will
* need to be replaced or will require a setter for the
* schema name
*/
// step [1]
/*
String schemaName = getSchemaName( entry.getDn() );
Set<String> dependents = schemaManager.listDependentSchemaNames( schemaName );
if ( ! dependents.isEmpty() )
{
throw new LdapOperationNotSupportedException(
"Cannot allow a rename on " + schemaName + " schema while it has depentents.",
ResultCodeEnum.UNWILLING_TO_PERFORM );
}
// check if the new schema is enabled or disabled
boolean isEnabled = false;
EntryAttribute disabled = entry.get( disabledAT );
if ( disabled == null )
{
isEnabled = true;
}
else if ( ! disabled.get().equals( "TRUE" ) )
{
isEnabled = true;
}
if ( ! isEnabled )
{
return;
}
// do steps 2 and 3 if the schema has been enabled and is loaded
// step [2]
String newSchemaName = ( String ) newRdn.getUpValue();
registries.getComparatorRegistry().renameSchema( schemaName, newSchemaName );
registries.getNormalizerRegistry().renameSchema( schemaName, newSchemaName );
registries.getSyntaxCheckerRegistry().renameSchema( schemaName, newSchemaName );
// step [3]
renameSchema( registries.getAttributeTypeRegistry(), schemaName, newSchemaName );
renameSchema( registries.getDitContentRuleRegistry(), schemaName, newSchemaName );
renameSchema( registries.getDitStructureRuleRegistry(), schemaName, newSchemaName );
renameSchema( registries.getMatchingRuleRegistry(), schemaName, newSchemaName );
renameSchema( registries.getMatchingRuleUseRegistry(), schemaName, newSchemaName );
renameSchema( registries.getNameFormRegistry(), schemaName, newSchemaName );
renameSchema( registries.getObjectClassRegistry(), schemaName, newSchemaName );
renameSchema( registries.getLdapSyntaxRegistry(), schemaName, newSchemaName );
*/
}
/**
 * Rejects any move-and-rename of a metaSchema object: schema entries are
 * pinned directly under ou=schema and may never be relocated.
 *
 * @throws LdapOperationNotSupportedException always, with UNWILLING_TO_PERFORM
 */
public void moveAndRename( LdapDN oriChildName, LdapDN newParentName, String newRn, boolean deleteOldRn,
    ServerEntry entry, boolean cascade ) throws NamingException
{
    // Unconditional rejection: relocating schema entries is never supported.
    throw new LdapOperationNotSupportedException( "Moving around schemas is not allowed.", ResultCodeEnum.UNWILLING_TO_PERFORM );
}
/**
 * Rejects any move of a metaSchema object: schema entries are pinned
 * directly under ou=schema and may never be relocated.
 *
 * @throws LdapOperationNotSupportedException always, with UNWILLING_TO_PERFORM
 */
public void move( LdapDN oriChildName, LdapDN newParentName,
    ServerEntry entry, boolean cascade ) throws NamingException
{
    // Unconditional rejection: relocating schema entries is never supported.
    throw new LdapOperationNotSupportedException( "Moving around schemas is not allowed.", ResultCodeEnum.UNWILLING_TO_PERFORM );
}
// -----------------------------------------------------------------------
// private utility methods
// -----------------------------------------------------------------------
    /**
     * Modify the Disable flag (the flag can be set to true or false).
     *
     * We can ADD, REMOVE or MODIFY this flag. The following matrix exposes the consequences
     * of this operation, depending on the current state of the m-disabled attribute:
     *
     * <pre>
     *                  +-------------------+--------------------+--------------------+
     *      op/state    |       TRUE        |       FALSE        |       ABSENT       |
     * +-------+--------+-------------------+--------------------+--------------------+
     * | ADD   | TRUE   | do nothing        | do nothing         | disable the schema |
     * |       +--------+-------------------+--------------------+--------------------+
     * |       | FALSE  | do nothing        | do nothing         | do nothing         |
     * +-------+--------+-------------------+--------------------+--------------------+
     * |REMOVE | N/A    | enable the schema | do nothing         | do nothing         |
     * +-------+--------+-------------------+--------------------+--------------------+
     * |MODIFY | TRUE   | do nothing        | disable the schema | disable the schema |
     * |       +--------+-------------------+--------------------+--------------------+
     * |       | FALSE  | enable the schema | do nothing         | do nothing         |
     * +-------+--------+-------------------+--------------------+--------------------+
     * </pre>
     *
     * @param opContext the modify operation context (provides the entry DN and session)
     * @param modOp the kind of modification applied to m-disabled (ADD/REMOVE/REPLACE)
     * @param disabledInMods the m-disabled attribute carried by the modification, if any
     * @param disabledInEntry the m-disabled attribute currently on the entry, if any
     * @return true if the schema's enabled/disabled state was actually changed
     * @throws Exception if enabling/disabling the schema fails
     */
    private boolean modifyDisable( ModifyOperationContext opContext, ModificationOperation modOp,
        EntryAttribute disabledInMods, EntryAttribute disabledInEntry ) throws Exception
    {
        LdapDN name = opContext.getDn();

        switch ( modOp )
        {
            /*
             * If the user is adding a new m-disabled attribute to an enabled schema,
             * we check that the value is "TRUE" and disable that schema if so.
             */
            case ADD_ATTRIBUTE :
                if ( disabledInEntry == null )
                {
                    if ( "TRUE".equalsIgnoreCase( disabledInMods.getString() ) )
                    {
                        return disableSchema( opContext.getSession(), getSchemaName( name ) );
                    }
                }

                break;

            /*
             * If the user is removing the m-disabled attribute we check if the schema is currently
             * disabled. If so we enable the schema.
             */
            case REMOVE_ATTRIBUTE :
                if ( ( disabledInEntry != null ) && ( "TRUE".equalsIgnoreCase( disabledInEntry.getString() ) ) )
                {
                    return enableSchema( getSchemaName( name ) );
                }

                break;

            /*
             * If the user is replacing the m-disabled attribute we check if the schema is
             * currently disabled and enable it if the new state has it as enabled. If the
             * schema is not disabled we disable it if the mods set m-disabled to true.
             */
            case REPLACE_ATTRIBUTE :
                // Current state: disabled only when the entry explicitly says "TRUE".
                boolean isCurrentlyDisabled = false;

                if ( disabledInEntry != null )
                {
                    isCurrentlyDisabled = "TRUE".equalsIgnoreCase( disabledInEntry.getString() );
                }

                // Target state: disabled only when the mods explicitly say "TRUE".
                boolean isNewStateDisabled = false;

                if ( disabledInMods != null )
                {
                    isNewStateDisabled = "TRUE".equalsIgnoreCase( disabledInMods.getString() );
                }

                if ( isCurrentlyDisabled && !isNewStateDisabled )
                {
                    return enableSchema( getSchemaName( name ) );
                }

                if ( !isCurrentlyDisabled && isNewStateDisabled )
                {
                    return disableSchema( opContext.getSession(), getSchemaName( name ) );
                }

                break;

            default:
                throw new IllegalArgumentException( "Unknown modify operation type: " + modOp );
        }

        // No state transition matched: the schema's state is unchanged.
        return SCHEMA_UNCHANGED;
    }
private String getSchemaName( LdapDN schema )
{
return ( String ) schema.getRdn().getValue();
}
/**
* Build the DN to access a schemaObject path for a specific schema
*/
private LdapDN buildDn( SchemaObjectType schemaObjectType, String schemaName ) throws NamingException
{
LdapDN path = new LdapDN(
SchemaConstants.OU_SCHEMA,
"cn=" + schemaName,
schemaObjectType.getRdn()
);
return path;
}
    /**
     * Disable a schema entry and update all of its schemaObjects.
     *
     * Replaces the entry's m-disabled, modifiersName and modifyTimestamp
     * attributes through an internal modify that bypasses all interceptors,
     * then iterates over the schema's content.
     *
     * NOTE(review): the trailing loop over the schema content is empty —
     * presumably each contained schemaObject should be disabled there too;
     * confirm whether this method is still work in progress.
     */
    private void disable( SchemaObject schemaObject, CoreSession session, Registries registries )
        throws Exception
    {
        Schema schema = registries.getLoadedSchema( schemaObject.getSchemaName() );

        List<Modification> modifications = new ArrayList<Modification>();

        // The m-disabled AT
        // NOTE(review): "FALSE" looks inverted for a *disable* operation
        // (disableAT() writes "TRUE" when disabling) — confirm the intent.
        EntryAttribute disabledAttr = new DefaultServerAttribute( disabledAT, "FALSE" );
        Modification disabledMod = new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, disabledAttr );
        modifications.add( disabledMod );

        // The modifiersName AT : record who performed this change.
        EntryAttribute modifiersNameAttr =
            new DefaultServerAttribute( modifiersNameAT, session.getEffectivePrincipal().getName() );
        Modification modifiersNameMod = new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, modifiersNameAttr );
        modifications.add( modifiersNameMod );

        // The modifyTimestamp AT : record when this change happened.
        EntryAttribute modifyTimestampAttr =
            new DefaultServerAttribute( modifyTimestampAT, DateUtils.getGeneralizedTime() );
        Modification modifyTimestampMod = new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, modifyTimestampAttr );
        modifications.add( modifyTimestampMod );

        // Call the modify operation, bypassing every interceptor.
        LdapDN dn = buildDn( schemaObject.getObjectType(), schemaObject.getName() );

        ModifyOperationContext modifyContext = new ModifyOperationContext( session, dn, modifications );
        modifyContext.setByPassed( ByPassConstants.BYPASS_ALL_COLLECTION );

        OperationManager operationManager =
            session.getDirectoryService().getOperationManager();

        operationManager.modify( modifyContext );

        // Now iterate on all the schemaObject under this schema
        for ( SchemaObjectWrapper schemaObjectWrapper : schema.getContent() )
        {
            // Intentionally empty so far — see NOTE(review) above.
        }
    }
private boolean disableSchema( CoreSession session, String schemaName ) throws Exception
{
Schema schema = registries.getLoadedSchema( schemaName );
if ( schema == null )
{
// This is not possible. We can't enable a schema which is not loaded.
String msg = "Unwilling to enable a not loaded schema: " + schemaName;
LOG.error( msg );
throw new LdapOperationNotSupportedException( msg, ResultCodeEnum.UNWILLING_TO_PERFORM );
}
return schemaManager.disable( schemaName );
/*
// First check that the schema is not already disabled
Map<String, Schema> schemas = registries.getLoadedSchemas();
Schema schema = schemas.get( schemaName );
if ( ( schema == null ) || schema.isDisabled() )
{
// The schema is disabled, do nothing
return SCHEMA_UNCHANGED;
}
Set<String> dependents = schemaManager.listEnabledDependentSchemaNames( schemaName );
if ( ! dependents.isEmpty() )
{
throw new LdapOperationNotSupportedException(
"Cannot disable schema with enabled dependents: " + dependents,
ResultCodeEnum.UNWILLING_TO_PERFORM );
}
schema.disable();
// Use brute force right now : iterate through all the schemaObjects
// searching for those associated with the disabled schema
disableAT( session, schemaName );
Set<SchemaObjectWrapper> content = registries.getLoadedSchema( schemaName ).getContent();
for ( SchemaObjectWrapper schemaWrapper : content )
{
SchemaObject schemaObject = schemaWrapper.get();
System.out.println( "Disabling " + schemaObject.getName() );
}
return SCHEMA_MODIFIED;
*/
}
    /**
     * Marks every enabled attributeType of the given schema as disabled by
     * building a REPLACE modification on its m-disabled attribute.
     *
     * NOTE(review): the built Modification is never applied — the
     * session.modify call is still commented out — so this method currently
     * has no observable effect; confirm whether it is work in progress.
     */
    private void disableAT( CoreSession session, String schemaName )
    {
        AttributeTypeRegistry atRegistry = registries.getAttributeTypeRegistry();

        for ( AttributeType attributeType : atRegistry )
        {
            // Only touch attributeTypes belonging to the schema being disabled.
            if ( schemaName.equalsIgnoreCase( attributeType.getSchemaName() ) )
            {
                if ( attributeType.isDisabled() )
                {
                    continue;
                }

                EntryAttribute disable = new DefaultServerAttribute( disabledAT, "TRUE" );
                Modification modification =
                    new ServerModification( ModificationOperation.REPLACE_ATTRIBUTE, disable );

                //session.modify( dn, mods, ignoreReferral, log )
            }
        }
    }
/**
* Enabling a schema consist on switching all of its schema element to enable.
* We have to do it on a temporary registries.
*/
private boolean enableSchema( String schemaName ) throws Exception
{
Schema schema = registries.getLoadedSchema( schemaName );
if ( schema == null )
{
// We have to load the schema before enabling it.
schemaManager.loadDisabled( schemaName );
}
return schemaManager.enable( schemaName );
}
    /**
     * Checks to make sure the dependencies either exist for disabled metaSchemas,
     * or exist and are loaded (enabled) for enabled metaSchemas.
     *
     * NOTE(review): the whole implementation is commented out below, so this
     * method is currently a no-op — no dependency validation happens. Confirm
     * whether it should be re-enabled (the SchemaManager may perform these
     * checks itself now).
     *
     * @param isEnabled whether or not the new metaSchema is enabled
     * @param entry the Attributes for the new metaSchema object
     * @throws NamingException if the dependencies do not resolve or are not
     * loaded (enabled)
     */
    private void checkForDependencies( boolean isEnabled, ServerEntry entry ) throws Exception
    {
        /*
        EntryAttribute dependencies = entry.get( this.dependenciesAT );

        if ( dependencies == null )
        {
            return;
        }

        if ( isEnabled )
        {
            // check to make sure all the dependencies are also enabled
            Map<String,Schema> loaded = registries.getLoadedSchemas();

            for ( Value<?> value:dependencies )
            {
                String dependency = value.getString();

                if ( ! loaded.containsKey( dependency ) )
                {
                    throw new LdapOperationNotSupportedException(
                        "Unwilling to perform operation on enabled schema with disabled or missing dependencies: "
                        + dependency, ResultCodeEnum.UNWILLING_TO_PERFORM );
                }
            }
        }
        else
        {
            Set<String> allSchemas = schemaManager.getSchemaNames();

            for ( Value<?> value:dependencies )
            {
                String dependency = value.getString();

                if ( ! allSchemas.contains( dependency ) )
                {
                    throw new LdapOperationNotSupportedException(
                        "Unwilling to perform operation on schema with missing dependencies: " + dependency,
                        ResultCodeEnum.UNWILLING_TO_PERFORM );
                }
            }
        }
        */
    }
/**
* Used to iterate through SchemaObjects in a DefaultSchemaObjectRegistry and rename
* their schema property to a new schema name.
*
* @param registry the registry whose objects are changed
* @param originalSchemaName the original schema name
* @param newSchemaName the new schema name
*/
private void renameSchema( DefaultSchemaObjectRegistry<? extends SchemaObject> registry, String originalSchemaName, String newSchemaName )
{
Iterator<? extends SchemaObject> list = registry.iterator();
while ( list.hasNext() )
{
SchemaObject obj = list.next();
if ( obj.getSchemaName().equalsIgnoreCase( originalSchemaName ) )
{
obj.setSchemaName( newSchemaName );
}
}
}
}
| Uncommented the checkForDependencies method
git-svn-id: 0e35ed151ed664c68b0cbbfc0d55a7f45990ca10@891985 13f79535-47bb-0310-9956-ffa450edef68
| core-api/src/main/java/org/apache/directory/server/core/schema/registries/synchronizers/SchemaSynchronizer.java | Uncommented the checkForDependencies method |
|
Java | apache-2.0 | 1acb26f6c2e0b89114528422cec019a7d03fa747 | 0 | reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api | /*
* Copyright 2018 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.demodata.service;
import com.epam.ta.reportportal.auth.ReportPortalUser;
import com.epam.ta.reportportal.dao.LaunchRepository;
import com.epam.ta.reportportal.dao.TestItemRepository;
import com.epam.ta.reportportal.entity.enums.StatusEnum;
import com.epam.ta.reportportal.entity.launch.Launch;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.ws.converter.builders.LaunchBuilder;
import com.epam.ta.reportportal.ws.model.ItemAttributeResource;
import com.epam.ta.reportportal.ws.model.launch.Mode;
import com.epam.ta.reportportal.ws.model.launch.StartLaunchRQ;
import com.google.common.collect.Sets;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.Set;
import static com.epam.ta.reportportal.entity.enums.StatusEnum.PASSED;
import static com.epam.ta.reportportal.ws.model.ErrorType.LAUNCH_NOT_FOUND;
/**
 * Service that starts and finishes launches used for demo data generation.
 *
 * @author <a href="mailto:[email protected]">Ihar Kahadouski</a>
 */
@Service
public class DemoDataLaunchService {

	private final LaunchRepository launchRepository;

	private final TestItemRepository testItemRepository;

	@Autowired
	public DemoDataLaunchService(LaunchRepository launchRepository, TestItemRepository testItemRepository) {
		this.launchRepository = launchRepository;
		this.testItemRepository = testItemRepository;
	}

	/**
	 * Creates and persists a new demo launch with generated description and
	 * a fixed set of demo attributes.
	 *
	 * @param name           launch name
	 * @param i              sequence number, used to derive the "build" attribute value
	 * @param user           user the launch is reported on behalf of
	 * @param projectDetails project the launch belongs to
	 * @return id of the persisted launch
	 */
	@Transactional
	public Long startLaunch(String name, int i, ReportPortalUser user, ReportPortalUser.ProjectDetails projectDetails) {
		StartLaunchRQ startRequest = new StartLaunchRQ();
		startRequest.setName(name);
		startRequest.setMode(Mode.DEFAULT);
		startRequest.setStartTime(new Date());
		startRequest.setDescription(ContentUtils.getLaunchDescription());

		Set<ItemAttributeResource> launchAttributes = Sets.newHashSet(
				new ItemAttributeResource("platform", "desktop"),
				new ItemAttributeResource(null, "demo"),
				new ItemAttributeResource("build", "3.0.1." + i)
		);

		Launch launch = new LaunchBuilder()
				.addStartRQ(startRequest)
				.addAttributes(launchAttributes)
				.addProject(projectDetails.getProjectId())
				.addUser(user.getUserId())
				.get();

		launchRepository.save(launch);
		// Re-read the persisted entity state right after saving — presumably to
		// populate DB-generated fields; confirm against LaunchRepository.refresh.
		launchRepository.refresh(launch);
		return launch.getId();
	}

	/**
	 * Finishes the launch: interrupts items the generator left IN_PROGRESS,
	 * stamps the end time and derives the final status from statistics.
	 *
	 * @param launchId id of the launch to finish
	 * @throws ReportPortalException with LAUNCH_NOT_FOUND if no such launch exists
	 */
	@Transactional
	public void finishLaunch(Long launchId, ReportPortalUser user, ReportPortalUser.ProjectDetails projectDetails) {
		Launch launch = launchRepository.findById(launchId)
				.orElseThrow(() -> new ReportPortalException(LAUNCH_NOT_FOUND, launchId.toString()));

		if (testItemRepository.hasItemsInStatusByLaunch(launchId, StatusEnum.IN_PROGRESS)) {
			testItemRepository.interruptInProgressItems(launchId);
		}

		launch = new LaunchBuilder(launch).addEndTime(new Date()).get();
		StatusEnum finalStatus = launchRepository.identifyStatus(launchId) ? StatusEnum.FAILED : PASSED;
		launch.setStatus(finalStatus);
		launchRepository.save(launch);
	}
}
| src/main/java/com/epam/ta/reportportal/demodata/service/DemoDataLaunchService.java | /*
* Copyright 2018 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.demodata.service;
import com.epam.ta.reportportal.auth.ReportPortalUser;
import com.epam.ta.reportportal.dao.LaunchRepository;
import com.epam.ta.reportportal.dao.TestItemRepository;
import com.epam.ta.reportportal.entity.enums.StatusEnum;
import com.epam.ta.reportportal.entity.launch.Launch;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.ws.converter.builders.LaunchBuilder;
import com.epam.ta.reportportal.ws.model.ItemAttributeResource;
import com.epam.ta.reportportal.ws.model.launch.Mode;
import com.epam.ta.reportportal.ws.model.launch.StartLaunchRQ;
import com.google.common.collect.Sets;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.Set;
import static com.epam.ta.reportportal.entity.enums.StatusEnum.PASSED;
import static com.epam.ta.reportportal.ws.model.ErrorType.LAUNCH_NOT_FOUND;
/**
 * Service that starts and finishes launches used for demo data generation.
 *
 * @author <a href="mailto:[email protected]">Ihar Kahadouski</a>
 */
@Service
public class DemoDataLaunchService {
	private final LaunchRepository launchRepository;
	private final TestItemRepository testItemRepository;
	@Autowired
	public DemoDataLaunchService(LaunchRepository launchRepository, TestItemRepository testItemRepository) {
		this.launchRepository = launchRepository;
		this.testItemRepository = testItemRepository;
	}
	/**
	 * Creates and persists a new demo launch with a generated description and
	 * a fixed set of demo attributes ("platform", "demo", "build").
	 *
	 * @param name           launch name
	 * @param i              sequence number, used to derive the "build" attribute value
	 * @param user           user the launch is reported on behalf of
	 * @param projectDetails project the launch belongs to
	 * @return id of the persisted launch
	 */
	@Transactional
	public Long startLaunch(String name, int i, ReportPortalUser user, ReportPortalUser.ProjectDetails projectDetails) {
		StartLaunchRQ rq = new StartLaunchRQ();
		rq.setMode(Mode.DEFAULT);
		rq.setDescription(ContentUtils.getLaunchDescription());
		rq.setName(name);
		rq.setStartTime(new Date());
		Set<ItemAttributeResource> attributes = Sets.newHashSet(
				new ItemAttributeResource("platform", "desktop"),
				new ItemAttributeResource(null, "demo"),
				new ItemAttributeResource("build", "3.0.1." + i)
		);
		Launch launch = new LaunchBuilder().addStartRQ(rq)
				.addAttributes(attributes)
				.addProject(projectDetails.getProjectId())
				.addUser(user.getUserId())
				.get();
		// NOTE(review): the entity is not refreshed after save, so any
		// DB-generated state may be missing on the returned id's entity — confirm.
		launchRepository.save(launch);
		return launch.getId();
	}
	/**
	 * Finishes the launch: interrupts items left IN_PROGRESS, stamps the end
	 * time and derives the final status (FAILED when statistics identify
	 * failures, PASSED otherwise).
	 *
	 * @param launchId id of the launch to finish
	 * @throws ReportPortalException with LAUNCH_NOT_FOUND if no such launch exists
	 */
	@Transactional
	public void finishLaunch(Long launchId, ReportPortalUser user, ReportPortalUser.ProjectDetails projectDetails) {
		Launch launch = launchRepository.findById(launchId)
				.orElseThrow(() -> new ReportPortalException(LAUNCH_NOT_FOUND, launchId.toString()));
		if (testItemRepository.hasItemsInStatusByLaunch(launchId, StatusEnum.IN_PROGRESS)) {
			testItemRepository.interruptInProgressItems(launchId);
		}
		launch = new LaunchBuilder(launch).addEndTime(new Date()).get();
		StatusEnum fromStatisticsStatus = PASSED;
		if (launchRepository.identifyStatus(launchId)) {
			fromStatisticsStatus = StatusEnum.FAILED;
		}
		launch.setStatus(fromStatisticsStatus);
		launchRepository.save(launch);
	}
}
}
| Demo data generation fix
| src/main/java/com/epam/ta/reportportal/demodata/service/DemoDataLaunchService.java | Demo data generation fix |
|
Java | apache-2.0 | 672c876618ac895ed78f460f0280096ebe30dd6d | 0 | macsj200/HopeSearch | package eyeglassesmain;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
public class EyeglassDatabase {

	/** All glasses read from the database file, in file order. */
	private ArrayList<Glasses> glasses;

	/**
	 * Loads the eyeglass database from the given file into memory.
	 *
	 * @param file the database file to read
	 * @throws FormattingException if the file is malformed
	 */
	public EyeglassDatabase(File file) throws FormattingException {
		// The original wrapped this call in a try/catch that only re-threw the
		// same exception; the wrapper added nothing and was removed.
		glasses = Reader.readFile(file);
		EyeglassesMain.log("Database read into RAM");
	}

	public ArrayList<Glasses> getGlasses() {
		return glasses;
	}

	/*
	 *
	 * This is the main search routine for glasses
	 *
	 * Searches through read-in database and returns that array filtered
	 *
	 */
	public ArrayList<Glasses> search(double rsph, double rcyl, int raxis, double lsph,
			double lcyl, int laxis) {
		ArrayList<Glasses> hits = new ArrayList<Glasses>();
		for (int i = 0; i < glasses.size(); i++) {
			Glasses g = glasses.get(i);
			// The matching rules are identical for both eyes, so the previously
			// duplicated right/left logic is factored into eyeMatches().
			if (eyeMatches(g.getRsph(), g.getRcyl(), g.getRaxis(), rsph, rcyl, raxis)
					&& eyeMatches(g.getLsph(), g.getLcyl(), g.getLaxis(), lsph, lcyl, laxis)) {
				hits.add(g);
			}
		}
		Collections.sort(hits);
		return hits;
	}

	/**
	 * Decides whether one lens (gSph/gCyl/gAxis) is an acceptable match for a
	 * requested prescription (sph/cyl/axis). The tolerance rules are exactly
	 * those of the original per-eye code.
	 *
	 * @param gSph  sphere of the lens in the database
	 * @param gCyl  cylinder of the lens in the database
	 * @param gAxis axis of the lens in the database
	 * @param sph   requested sphere
	 * @param cyl   requested cylinder
	 * @param axis  requested axis (degrees; periodic with period 180)
	 * @return true if the lens is within tolerance of the request
	 */
	private boolean eyeMatches(double gSph, double gCyl, int gAxis, double sph, double cyl, int axis) {
		// Cylinder: lens cyl must be within [cyl, cyl + 0.75].
		if (!isBetween(gCyl, cyl, cyl + 0.75)) {
			return false;
		}
		// Sphere tolerance depends on the sign of the requested sphere.
		if (sph == 0) {
			if (!isBetween(gSph, sph - 0.25, sph + 0.25)) {
				return false;
			}
		}
		if (sph > 0) {
			if (!isBetween(gSph, sph - 0.5, sph)) {
				return false;
			}
		}
		if (sph < 0) {
			// For minus spheres the window additionally depends on the cylinder.
			if (isBetween(cyl, -1.8, -0.8)) {
				if (!isBetween(gSph, sph - 0.25, sph + 0.5)) {
					return false;
				}
			} else if (cyl > -0.8) {
				if (!isBetween(gSph, sph, sph + 0.5)) {
					return false;
				}
			} else if (cyl < -1.8) {
				if (!isBetween(gSph, sph - 0.5, sph + 0.5)) {
					return false;
				}
			}
		}
		// Axis is periodic with period 180 degrees.
		axis = axis % 180;
		// With a (near-)zero requested cylinder (0, -0.25, -0.5 or -0.75 before
		// the +0.75 window) the lens axis must be recorded as 0.
		if (cyl == 0 || cyl + 0.25 == 0 || cyl + 0.5 == 0 || cyl + 0.75 == 0) {
			if (gAxis != 0) {
				return false;
			}
		}
		// Axis must lie within +/- 20 degrees of the request, wrapping at 0/180.
		if (axis + 20 > 180) {
			if (!(gAxis <= axis + 20 - 180 || gAxis >= axis - 20)) {
				return false;
			}
		} else if (axis - 20 < 0) {
			if (!(gAxis <= axis + 20 || gAxis >= axis - 20 + 180)) {
				return false;
			}
		} else {
			if (!isBetween(gAxis, axis - 20, axis + 20)) {
				return false;
			}
		}
		return true;
	}

	/** String-argument convenience overload: parses and delegates. */
	public ArrayList<Glasses> search(String rsph, String rcyl, String raxis, String lsph,
			String lcyl, String laxis) {
		return search(Double.valueOf(rsph), Double.valueOf(rcyl), Integer.valueOf(raxis),
				Double.valueOf(lsph), Double.valueOf(lcyl), Integer.valueOf(laxis));
	}

	/** @return true iff {@code low <= num <= high}. */
	public boolean isBetween(double num, double low, double high) {
		return (num >= low) && (num <= high);
	}
}
| source/eyeglassesmain/EyeglassDatabase.java | package eyeglassesmain;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
public class EyeglassDatabase {

	/** All glasses read from the database file, in file order. */
	private ArrayList<Glasses> glasses;

	/**
	 * Loads the eyeglass database from the given file into memory.
	 *
	 * @param file the database file to read
	 * @throws FormattingException if the file is malformed
	 */
	public EyeglassDatabase(File file) throws FormattingException {
		// The original wrapped this call in a try/catch that only re-threw the
		// same exception; the wrapper added nothing and was removed.
		glasses = Reader.readFile(file);
		EyeglassesMain.log("Database read into RAM");
	}

	public ArrayList<Glasses> getGlasses() {
		return glasses;
	}

	/*
	 *
	 * This is the main search routine for glasses
	 *
	 * Searches through read-in database and returns that array filtered
	 *
	 */
	public ArrayList<Glasses> search(double rsph, double rcyl, int raxis, double lsph,
			double lcyl, int laxis) {
		ArrayList<Glasses> hits = new ArrayList<Glasses>();
		for (int i = 0; i < glasses.size(); i++) {
			Glasses g = glasses.get(i);
			// The matching rules are the same for both eyes, so the previously
			// duplicated right/left logic is factored into eyeMatches().
			if (eyeMatches(g.getRsph(), g.getRcyl(), g.getRaxis(), rsph, rcyl, raxis)
					&& eyeMatches(g.getLsph(), g.getLcyl(), g.getLaxis(), lsph, lcyl, laxis)) {
				hits.add(g);
			}
		}
		Collections.sort(hits);
		return hits;
	}

	/**
	 * Decides whether one lens (gSph/gCyl/gAxis) is an acceptable match for a
	 * requested prescription (sph/cyl/axis).
	 *
	 * @param gSph  sphere of the lens in the database
	 * @param gCyl  cylinder of the lens in the database
	 * @param gAxis axis of the lens in the database
	 * @param sph   requested sphere
	 * @param cyl   requested cylinder
	 * @param axis  requested axis (degrees; periodic with period 180)
	 * @return true if the lens is within tolerance of the request
	 */
	private boolean eyeMatches(double gSph, double gCyl, int gAxis, double sph, double cyl, int axis) {
		// Cylinder: lens cyl must be within [cyl, cyl + 0.75].
		if (!isBetween(gCyl, cyl, cyl + 0.75)) {
			return false;
		}
		// Sphere tolerance depends on the sign of the requested sphere.
		if (sph == 0) {
			if (!isBetween(gSph, sph - 0.25, sph + 0.25)) {
				return false;
			}
		}
		if (sph > 0) {
			if (!isBetween(gSph, sph - 0.5, sph)) {
				return false;
			}
		}
		if (sph < 0) {
			// For minus spheres the window additionally depends on the cylinder.
			if (isBetween(cyl, -1.8, -0.8)) {
				if (!isBetween(gSph, sph - 0.25, sph + 0.5)) {
					return false;
				}
			} else if (cyl > -0.8) {
				if (!isBetween(gSph, sph, sph + 0.5)) {
					return false;
				}
			} else if (cyl < -1.8) {
				if (!isBetween(gSph, sph - 0.5, sph + 0.5)) {
					return false;
				}
			}
		}
		// Axis is periodic with period 180 degrees.
		axis = axis % 180;
		// A requested cylinder of 0, -0.25, -0.5 or -0.75 makes the axis
		// irrelevant: it is normalized to 0 before the window check.
		if (cyl == 0 || cyl + 0.25 == 0 || cyl + 0.5 == 0 || cyl + 0.75 == 0) {
			axis = 0;
		}
		// BUG FIX: the original gated the left eye's axis window on
		// "laxis != 0" while the right eye used "rcyl != 0". Both eyes now
		// consistently skip the axis window only when no cylinder is requested.
		if (cyl != 0) {
			// Axis must lie within +/- 20 degrees, wrapping at 0/180.
			if (axis + 20 > 180) {
				if (!(gAxis <= axis + 20 - 180 || gAxis >= axis - 20)) {
					return false;
				}
			} else if (axis - 20 < 0) {
				if (!(gAxis <= axis + 20 || gAxis >= axis - 20 + 180)) {
					return false;
				}
			} else {
				if (!isBetween(gAxis, axis - 20, axis + 20)) {
					return false;
				}
			}
		}
		return true;
	}

	/** String-argument convenience overload: parses and delegates. */
	public ArrayList<Glasses> search(String rsph, String rcyl, String raxis, String lsph,
			String lcyl, String laxis) {
		return search(Double.valueOf(rsph), Double.valueOf(rcyl), Integer.valueOf(raxis),
				Double.valueOf(lsph), Double.valueOf(lcyl), Integer.valueOf(laxis));
	}

	/** @return true iff {@code low <= num <= high}. */
	public boolean isBetween(double num, double low, double high) {
		return (num >= low) && (num <= high);
	}
}
| Super awesome commit | source/eyeglassesmain/EyeglassDatabase.java | Super awesome commit |
|
Java | apache-2.0 | 9f72deeacb2b4b5e5a7a0cea9c83fccc00432a8d | 0 | naokiur/design-pattern-sample | package jp.ne.naokiur.design.pattern.iterator;
public class ItelatorSample {
private String name;
private String address;
public ItelatorSample(String name, String address) {
this.name = name;
this.address = address;
}
}
| src/main/java/jp/ne/naokiur/design/pattern/iterator/ItelatorSample.java | package jp.ne.naokiur.design.pattern.iterator;
// Empty placeholder for the Iterator-pattern sample; state and behaviour
// are to be added later.
public class ItelatorSample {
}
| Create package for Iterator pattern.
| src/main/java/jp/ne/naokiur/design/pattern/iterator/ItelatorSample.java | Create package for Iterator pattern. |
|
Java | apache-2.0 | 851638f9ee4eb225d5a8bb76b8027a315b150216 | 0 | dbeaver/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,Sargul/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2018 Serge Rider ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.controls.resultset.panel.grouping;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DropTarget;
import org.eclipse.swt.dnd.DropTargetAdapter;
import org.eclipse.swt.dnd.DropTargetEvent;
import org.eclipse.swt.dnd.TextTransfer;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBPEvaluationContext;
import org.jkiss.dbeaver.model.data.DBDAttributeBinding;
import org.jkiss.dbeaver.model.sql.SQLDialect;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.DBSDataContainer;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.runtime.ui.DBUserInterface;
import org.jkiss.dbeaver.ui.controls.lightgrid.LightGrid;
import org.jkiss.dbeaver.ui.controls.resultset.IResultSetDecorator;
import org.jkiss.dbeaver.ui.controls.resultset.IResultSetPresentation;
import java.util.ArrayList;
import java.util.List;
/**
 * Decorator for the grouping panel: supplies the empty-state messages, the
 * toolbar actions and the drag-and-drop handling that lets the user drop
 * result-grid columns to define grouping attributes.
 */
public class GroupingResultsDecorator implements IResultSetDecorator {

    private final GroupingResultsContainer container;

    public GroupingResultsDecorator(GroupingResultsContainer container) {
        this.container = container;
    }

    @Override
    public long getDecoratorFeatures() {
        return FEATURE_NONE;
    }

    @Override
    public String getEmptyDataMessage() {
        return "No Groupings";
    }

    /**
     * Explains why the grouping panel is empty: no connection, a dialect that
     * does not support subqueries, or simply no grouping columns dropped yet.
     */
    @Override
    public String getEmptyDataDescription() {
        // Guard against a missing data container before dereferencing it
        // (previously this chain could throw an NPE).
        DBSDataContainer dataContainer = container.getResultSetController().getDataContainer();
        DBPDataSource dataSource = dataContainer == null ? null : dataContainer.getDataSource();
        if (dataSource == null) {
            // Fixed grammar of the user-visible message ("No connected to database").
            return "Not connected to a database";
        }
        SQLDialect dialect = SQLUtils.getDialectFromDataSource(dataSource);
        if (dialect == null || !dialect.supportsSubqueries()) {
            return "Grouping is not supported\nby datasource '" + dataSource.getContainer().getDriver().getFullName() + "'";
        } else {
            return "Drag-and-drop results column(s) here to create grouping\nPress CONTROL to configure grouping settings";
        }
    }

    @Override
    public void fillContributions(IContributionManager contributionManager) {
        contributionManager.add(new GroupingPanel.EditColumnsAction(container));
        contributionManager.add(new GroupingPanel.DeleteColumnAction(container));
        contributionManager.add(new GroupingPanel.ClearGroupingAction(container));
    }

    /**
     * Registers a drop target on the presentation control so that columns
     * dragged from the results grid can be dropped here to add grouping
     * attributes. A COPY drop (CTRL held) opens the grouping settings dialog;
     * cancelling the dialog clears the grouping that was just added.
     */
    @Override
    public void registerDragAndDrop(IResultSetPresentation presentation) {
        // Register drop target to accept columns dropping.
        // Dispose any previously registered target - SWT allows only one per control.
        Object oldDropTarget = presentation.getControl().getData(DND.DROP_TARGET_KEY);
        if (oldDropTarget instanceof DropTarget) {
            ((DropTarget) oldDropTarget).dispose();
        }
        DropTarget dropTarget = new DropTarget(presentation.getControl(), DND.DROP_MOVE | DND.DROP_COPY);
        dropTarget.setTransfer(LightGrid.GridColumnTransfer.INSTANCE, TextTransfer.getInstance());
        dropTarget.addDropListener(new DropTargetAdapter() {
            @Override
            public void dragEnter(DropTargetEvent event) {
                handleDragEvent(event);
            }

            @Override
            public void dragLeave(DropTargetEvent event) {
                handleDragEvent(event);
            }

            @Override
            public void dragOperationChanged(DropTargetEvent event) {
                handleDragEvent(event);
            }

            @Override
            public void dragOver(DropTargetEvent event) {
                handleDragEvent(event);
            }

            @Override
            public void drop(DropTargetEvent event) {
                handleDragEvent(event);
                if (event.detail == DND.DROP_MOVE || event.detail == DND.DROP_COPY) {
                    dropColumns(event);
                }
            }

            @Override
            public void dropAccept(DropTargetEvent event) {
                handleDragEvent(event);
            }

            // Normalizes the drag event: rejects unsupported payloads and
            // defaults the operation to MOVE.
            private void handleDragEvent(DropTargetEvent event) {
                if (!isDropSupported(event)) {
                    event.detail = DND.DROP_NONE;
                } else {
                    if (event.detail == DND.DROP_NONE) {
                        event.detail = DND.DROP_MOVE;
                    }
                }
                event.feedback = DND.FEEDBACK_SELECT;
            }

            private boolean isDropSupported(DropTargetEvent event) {
                return true;
                // TODO: check type
                //ArrayUtils.contains(event.dataTypes, LightGrid.GridColumnTransfer.INSTANCE);
            }

            @SuppressWarnings("unchecked")
            private void dropColumns(DropTargetEvent event) {
                if (!(event.data instanceof List)) {
                    return;
                }
                // Collect the fully-qualified names of the dropped attribute bindings.
                List<Object> dropElements = (List<Object>) event.data;
                List<String> attributeBindings = new ArrayList<>();
                for (Object element : dropElements) {
                    if (element instanceof DBDAttributeBinding) {
                        attributeBindings.add(((DBDAttributeBinding) element).getFullyQualifiedName(DBPEvaluationContext.DML));
                    }
                }
                if (!attributeBindings.isEmpty()) {
                    container.addGroupingAttributes(attributeBindings);
                }
                // COPY drop (CTRL) opens the configuration dialog first; a
                // cancelled dialog undoes the grouping that was just added.
                if (event.detail == DND.DROP_COPY) {
                    GroupingConfigDialog dialog = new GroupingConfigDialog(container.getResultSetController().getControl().getShell(), container);
                    if (dialog.open() != IDialogConstants.OK_ID) {
                        container.clearGrouping();
                        return;
                    }
                }
                try {
                    container.rebuildGrouping();
                } catch (DBException e) {
                    DBUserInterface.getInstance().showError("Grouping error", "Can't perform grouping query", e);
                }
            }
        });
    }
}
| plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/panel/grouping/GroupingResultsDecorator.java | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2018 Serge Rider ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.controls.resultset.panel.grouping;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DropTarget;
import org.eclipse.swt.dnd.DropTargetAdapter;
import org.eclipse.swt.dnd.DropTargetEvent;
import org.eclipse.swt.dnd.TextTransfer;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.DBPEvaluationContext;
import org.jkiss.dbeaver.model.data.DBDAttributeBinding;
import org.jkiss.dbeaver.runtime.ui.DBUserInterface;
import org.jkiss.dbeaver.ui.controls.lightgrid.LightGrid;
import org.jkiss.dbeaver.ui.controls.resultset.IResultSetDecorator;
import org.jkiss.dbeaver.ui.controls.resultset.IResultSetPresentation;
import java.util.ArrayList;
import java.util.List;
/**
* Decorator for grouping panel
*/
public class GroupingResultsDecorator implements IResultSetDecorator {
private GroupingResultsContainer container;
public GroupingResultsDecorator(GroupingResultsContainer container) {
this.container = container;
}
@Override
public long getDecoratorFeatures() {
return FEATURE_NONE;
}
@Override
public String getEmptyDataMessage() {
return "No Groupings";
}
@Override
public String getEmptyDataDescription() {
return "Drag-and-drop results column(s) here to create grouping\nPress CONTROL to configure grouping settings";
}
@Override
public void fillContributions(IContributionManager contributionManager) {
contributionManager.add(new GroupingPanel.EditColumnsAction(container));
contributionManager.add(new GroupingPanel.DeleteColumnAction(container));
contributionManager.add(new GroupingPanel.ClearGroupingAction(container));
}
@Override
public void registerDragAndDrop(IResultSetPresentation presentation) {
// Register drop target to accept columns dropping
Object oldDropTarget = presentation.getControl().getData(DND.DROP_TARGET_KEY);
if (oldDropTarget instanceof DropTarget) {
((DropTarget) oldDropTarget).dispose();
}
DropTarget dropTarget = new DropTarget(presentation.getControl(), DND.DROP_MOVE | DND.DROP_COPY);
dropTarget.setTransfer(LightGrid.GridColumnTransfer.INSTANCE, TextTransfer.getInstance());
dropTarget.addDropListener(new DropTargetAdapter() {
@Override
public void dragEnter(DropTargetEvent event) {
handleDragEvent(event);
}
@Override
public void dragLeave(DropTargetEvent event) {
handleDragEvent(event);
}
@Override
public void dragOperationChanged(DropTargetEvent event) {
handleDragEvent(event);
}
@Override
public void dragOver(DropTargetEvent event) {
handleDragEvent(event);
}
@Override
public void drop(DropTargetEvent event) {
handleDragEvent(event);
if (event.detail == DND.DROP_MOVE || event.detail == DND.DROP_COPY) {
dropColumns(event);
}
}
@Override
public void dropAccept(DropTargetEvent event) {
handleDragEvent(event);
}
private void handleDragEvent(DropTargetEvent event) {
if (!isDropSupported(event)) {
event.detail = DND.DROP_NONE;
} else {
if (event.detail == DND.DROP_NONE) {
event.detail = DND.DROP_MOVE;
}
}
event.feedback = DND.FEEDBACK_SELECT;
}
private boolean isDropSupported(DropTargetEvent event) {
return true;
// TODO: check type
//ArrayUtils.contains(event.dataTypes, LightGrid.GridColumnTransfer.INSTANCE);
}
@SuppressWarnings("unchecked")
private void dropColumns(DropTargetEvent event) {
if (!(event.data instanceof List)) {
return;
}
List<Object> dropElements = (List<Object>) event.data;
List<String> attributeBindings = new ArrayList<>();
for (Object element : dropElements) {
if (element instanceof DBDAttributeBinding) {
attributeBindings.add(((DBDAttributeBinding) element).getFullyQualifiedName(DBPEvaluationContext.DML));
}
}
if (!attributeBindings.isEmpty()) {
container.addGroupingAttributes(attributeBindings);
}
if (event.detail == DND.DROP_COPY) {
GroupingConfigDialog dialog = new GroupingConfigDialog(container.getResultSetController().getControl().getShell(), container);
if (dialog.open() != IDialogConstants.OK_ID) {
container.clearGrouping();
return;
}
}
try {
container.rebuildGrouping();
} catch (DBException e) {
DBUserInterface.getInstance().showError("Grouping error", "Can't perform grouping query", e);
}
}
});
}
}
| Grouping - check for subselects support
Former-commit-id: cc9ea73cb634fb6219f2fce67994138d3b7ee2ad | plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/panel/grouping/GroupingResultsDecorator.java | Grouping - check for subselects support |
|
Java | apache-2.0 | 4d3104ccfa22b92397bdae867447a1d797bb041e | 0 | davidw/hecl | /* Copyright 2005-2007 Wojciech Kocjan, David N. Welton
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.util.Vector;
import org.hecl.Interp;
import org.hecl.Thing;
import org.hecl.ListThing;
import org.hecl.HeclException;
import org.hecl.HeclTask;
import org.hecl.Command;
import org.hecl.ObjectThing;
import org.hecl.HeclException;
import org.hecl.IntThing;
import org.hecl.ClassCommand;
import org.hecl.ClassCommandInfo;
import org.hecl.files.FileCmds;
import org.hecl.files.HeclFile;
import org.hecl.load.LoadCmd;
import org.hecl.net.Base64Cmd;
import org.hecl.net.HttpCmd;
import org.hecl.java.HeclJavaCmd;
/**
* <code>Hecl</code> - this class implements the main Hecl command
* line interpreter.
*
* @author <a href="mailto:[email protected]">David N. Welton</a>
* @version 1.0
*/
public class Hecl {
/**
* <code>main</code> is what actually runs things.
*
* @param args a <code>String[]</code> value
*/
public static void main(String[] args) {
Interp interp = null;
try {
interp = new Interp();
} catch (HeclException he) {
System.err.println("Error initializing the Hecl interpreter: " + he);
System.exit(1);
}
try {
int i;
/* Add the standard packages in. */
FileCmds.load(interp);
Base64Cmd.load(interp);
HttpCmd.load(interp);
LoadCmd.load(interp);
HeclJavaCmd.load(interp);
Vector argv = new Vector();
for (i = 0; i < args.length; i++) {
//System.out.println("(running " + args[i] + ")");
argv.addElement(new Thing(args[i]));
}
interp.setVar("argv", ListThing.create(argv));
extend(interp);
if(args.length == 1) {
HeclFile.sourceFile(interp, args[0]);
} else {
interp.readEvalPrint(System.in,System.out,System.err);
}
} catch (Exception e) {
System.err.println("Java exception: " + e);
e.printStackTrace();
} catch (Throwable t) {
System.err.println("Java error: " + t);
t.printStackTrace();
}
interp.terminate();
System.exit(0);
}
/*
static class ObjectCmd implements Command {
ObjectCmd() {}
public Thing cmdCode(Interp ip,Thing[] argv) throws HeclException {
System.err.println("ObjectCmd:");
for(int i=0; i<argv.length; ++i) {
System.err.println("\targv["+i+"]="+argv[i].toString());
}
return ObjectThing.create(this);
}
}
static class ClCmd implements ClassCommand {
static int cnt = 0;
public ClCmd() {}
public Thing method(Interp ip,ClassCommandInfo context,
Thing[] argv) throws HeclException {
System.err.println("method, this="+argv[0].toString()
+", name="+argv[1].toString());
for(int i=2; i<argv.length; ++i) {
System.err.println("\targv["+i+"]="+argv[i].toString());
}
return IntThing.create(--cnt);
}
}
*/
public static void extend(Interp ip) throws HeclException {
/*
ip.addCommand("jones",new ObjectCmd());
ip.addClassCmd(ObjectCmd.class,new ClCmd());
*/
}
}
| trunk/hecl/commandline/Hecl.java | /* Copyright 2005-2006 Wojciech Kocjan, David N. Welton
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.util.Vector;
import org.hecl.Interp;
import org.hecl.Thing;
import org.hecl.ListThing;
import org.hecl.HeclException;
import org.hecl.HeclTask;
import org.hecl.Command;
import org.hecl.ObjectThing;
import org.hecl.HeclException;
import org.hecl.IntThing;
import org.hecl.ClassCommand;
import org.hecl.ClassCommandInfo;
import org.hecl.files.FileCmds;
import org.hecl.files.HeclFile;
import org.hecl.load.LoadCmd;
import org.hecl.net.Base64Cmd;
import org.hecl.net.HttpCmd;
/**
* <code>Hecl</code> - this class implements the main Hecl command
* line interpreter.
*
* @author <a href="mailto:[email protected]">David N. Welton</a>
* @version 1.0
*/
public class Hecl {
/**
* <code>main</code> is what actually runs things.
*
* @param args a <code>String[]</code> value
*/
public static void main(String[] args) {
Interp interp = null;
try {
interp = new Interp();
} catch (HeclException he) {
System.err.println("Error initializing the Hecl interpreter: " + he);
System.exit(1);
}
try {
int i;
/* Add the standard packages in. */
FileCmds.load(interp);
Base64Cmd.load(interp);
HttpCmd.load(interp);
LoadCmd.load(interp);
Vector argv = new Vector();
for (i = 0; i < args.length; i++) {
//System.out.println("(running " + args[i] + ")");
argv.addElement(new Thing(args[i]));
}
interp.setVar("argv", ListThing.create(argv));
extend(interp);
if(args.length == 1) {
HeclFile.sourceFile(interp, args[0]);
} else {
interp.readEvalPrint(System.in,System.out,System.err);
}
} catch (Exception e) {
System.err.println("Java exception: " + e);
e.printStackTrace();
} catch (Throwable t) {
System.err.println("Java error: " + t);
t.printStackTrace();
}
interp.terminate();
System.exit(0);
}
/*
static class ObjectCmd implements Command {
ObjectCmd() {}
public Thing cmdCode(Interp ip,Thing[] argv) throws HeclException {
System.err.println("ObjectCmd:");
for(int i=0; i<argv.length; ++i) {
System.err.println("\targv["+i+"]="+argv[i].toString());
}
return ObjectThing.create(this);
}
}
static class ClCmd implements ClassCommand {
static int cnt = 0;
public ClCmd() {}
public Thing method(Interp ip,ClassCommandInfo context,
Thing[] argv) throws HeclException {
System.err.println("method, this="+argv[0].toString()
+", name="+argv[1].toString());
for(int i=2; i<argv.length; ++i) {
System.err.println("\targv["+i+"]="+argv[i].toString());
}
return IntThing.create(--cnt);
}
}
*/
public static void extend(Interp ip) throws HeclException {
/*
ip.addCommand("jones",new ObjectCmd());
ip.addClassCmd(ObjectCmd.class,new ClCmd());
*/
}
}
| * Hecl.java: Load HeclJava integration.
| trunk/hecl/commandline/Hecl.java | * Hecl.java: Load HeclJava integration. |
|
Java | bsd-2-clause | b272f8c04fe55f8fcd0ff06efc73628581f8242f | 0 | samynk/DArtE | package dae.io.readers;
import com.jme3.scene.Node;
import dae.components.ComponentType;
import dae.components.PrefabComponent;
import dae.io.XMLUtils;
import dae.prefabs.Prefab;
import dae.prefabs.PropertyReflector;
import dae.prefabs.ReflectionManager;
import dae.prefabs.UnresolvedReferencePrefab;
import dae.prefabs.parameters.BaseTypeParameter;
import dae.prefabs.parameters.BooleanParameter;
import dae.prefabs.parameters.ChoiceParameter;
import dae.prefabs.parameters.ColorParameter;
import dae.prefabs.parameters.EnumListParameter;
import dae.prefabs.parameters.FileParameter;
import dae.prefabs.parameters.Float2Parameter;
import dae.prefabs.parameters.Float3Parameter;
import dae.prefabs.parameters.FloatParameter;
import dae.prefabs.parameters.IntParameter;
import dae.prefabs.parameters.ListParameter;
import dae.prefabs.parameters.ObjectParameter;
import dae.prefabs.parameters.Parameter;
import dae.prefabs.parameters.RangeParameter;
import dae.prefabs.parameters.TextParameter;
import dae.prefabs.types.ObjectTypeInstance;
import java.util.HashMap;
/**
* @author Koen Samyn
*/
public class DefaultPrefabImporter implements PrefabTextImporter {
private Node levelNode;
private HashMap<Class, ParameterParser> parameterMap =
new HashMap<Class, ParameterParser>();
/**
* Creates a new DefaultPrefabImporter object.
*/
public DefaultPrefabImporter() {
parameterMap.put(FloatParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Float.parseFloat(value);
}
});
parameterMap.put(BooleanParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Boolean.parseBoolean(value);
}
});
parameterMap.put(RangeParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Float.parseFloat(value);
}
});
parameterMap.put(IntParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Integer.parseInt(value);
}
});
parameterMap.put(TextParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return value;
}
});
parameterMap.put(Float2Parameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return XMLUtils.parseFloat2(value);
}
});
parameterMap.put(Float3Parameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return XMLUtils.parseFloat3(value);
}
});
parameterMap.put(FileParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return value;
}
});
parameterMap.put(ChoiceParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return value;
}
});
parameterMap.put(EnumListParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
EnumListParameter elp = (EnumListParameter) p;
return elp.getEnum(value);
}
});
parameterMap.put(BaseTypeParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String id) {
// find the object in the scene with the given id.
return levelNode.getChild(id);
}
});
parameterMap.put(ObjectParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String id) {
Object prefab = levelNode.getChild(id);
if (prefab == null) {
UnresolvedReferencePrefab ur = new UnresolvedReferencePrefab();
ur.setReference(parent, p, id);
prefab = ur;
}
return prefab;
}
});
parameterMap.put(ColorParameter.class,new ParameterParser(){
public Object parseParameter(Object parent, Parameter p, String id) {
// find the object in the scene with the given id.
return XMLUtils.parseColor(id);
}
});
}
/**
* Sets the root node for this importer. Can be used to find objects in the
* game.
*
* @param rootNode the rootnode to find.
*/
public void setRootNode(Node rootNode) {
this.levelNode = rootNode;
}
/**
* Parses a parameter and sets the value on the correct property of the
* component.
*
* @param prefab the prefab to set the property on.
* @param p the prefab component to set the property on.
* @param ct the component type of the PrefabComponent.
* @param id the id of the parameter.
* @param value the value of the parameter.
*/
@Override
public void parseAndSetParameter(PrefabComponent p, ComponentType ct, String id, String value) {
Parameter parameter = ct.findParameter(id);
if (parameter == null || value == null) {
return;
}
ParameterParser pp = parameterMap.get(parameter.getClass());
if (pp != null) {
Object oValue = pp.parseParameter(p, parameter, value);
if (oValue != null) {
PropertyReflector pr = ReflectionManager.getInstance().getPropertyReflector(p.getClass());
pr.invokeSetMethod(p, id, parameter.convertToObject(oValue));
}
}
}
/**
* Parses a parameter and sets the value on the correct property of the
* component.
*
* @param p the prefab component to set the property on.
* @param ct the component type of the PrefabComponent.
* @param id the id of the parameter.
* @param value the value of the parameter.
*/
@Override
public void parseAndSetParameter(Prefab p, String id, String value) {
Parameter parameter = p.getObjectType().findParameter(id);
if (parameter == null || value == null) {
return;
}
ParameterParser pp = parameterMap.get(parameter.getClass());
if (pp != null) {
Object oValue = pp.parseParameter(p, parameter, value);
if (oValue != null) {
parameter.invokeSet(p, oValue, false);
}
}
}
/**
* Parses a parameter and sets the value on the correct property of the
* component.
*
* @param p the prefab component to set the property on.
* @param ct the component type of the PrefabComponent.
* @param id the id of the parameter.
* @param value the value of the parameter.
*/
public void parseAndSetListParameter(Prefab prefab, String id, String value) {
Parameter parameter = prefab.getObjectType().findParameter(id);
if (parameter == null || value == null || !(parameter instanceof ListParameter)) {
return;
}
ListParameter lp = (ListParameter) parameter;
ParameterParser pp = parameterMap.get(lp.getBaseType().getClass());
if (pp != null) {
Object oValue = pp.parseParameter(prefab, parameter, value);
if (oValue != null) {
lp.addListItem(prefab, oValue);
}
}
}
/**
* Parses the defaultValue parameter and returns the result.
*
* @param p the parameter that has the information about the value to parse.
* @param defaultValue the value to parse.
* @return the parsed object.
*/
public Object parseParameter(Parameter p, String defaultValue) {
ParameterParser pp = parameterMap.get(p.getClass());
return pp.parseParameter(null, p, defaultValue);
}
}
| src/dae/io/readers/DefaultPrefabImporter.java | package dae.io.readers;
import com.jme3.scene.Node;
import dae.components.ComponentType;
import dae.components.PrefabComponent;
import dae.io.XMLUtils;
import dae.prefabs.Prefab;
import dae.prefabs.PropertyReflector;
import dae.prefabs.ReflectionManager;
import dae.prefabs.UnresolvedReferencePrefab;
import dae.prefabs.parameters.BaseTypeParameter;
import dae.prefabs.parameters.BooleanParameter;
import dae.prefabs.parameters.ChoiceParameter;
import dae.prefabs.parameters.ColorParameter;
import dae.prefabs.parameters.EnumListParameter;
import dae.prefabs.parameters.FileParameter;
import dae.prefabs.parameters.Float2Parameter;
import dae.prefabs.parameters.Float3Parameter;
import dae.prefabs.parameters.FloatParameter;
import dae.prefabs.parameters.IntParameter;
import dae.prefabs.parameters.ListParameter;
import dae.prefabs.parameters.ObjectParameter;
import dae.prefabs.parameters.Parameter;
import dae.prefabs.parameters.RangeParameter;
import dae.prefabs.parameters.TextParameter;
import dae.prefabs.types.ObjectTypeInstance;
import java.util.HashMap;
/**
* @author Koen Samyn
*/
public class DefaultPrefabImporter implements PrefabTextImporter {
private Node levelNode;
private HashMap<Class, ParameterParser> parameterMap =
new HashMap<Class, ParameterParser>();
/**
* Creates a new DefaultPrefabImporter object.
*/
public DefaultPrefabImporter() {
parameterMap.put(FloatParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Float.parseFloat(value);
}
});
parameterMap.put(BooleanParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Boolean.parseBoolean(value);
}
});
parameterMap.put(RangeParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Float.parseFloat(value);
}
});
parameterMap.put(IntParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return Float.parseFloat(value);
}
});
parameterMap.put(TextParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return value;
}
});
parameterMap.put(Float2Parameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return XMLUtils.parseFloat2(value);
}
});
parameterMap.put(Float3Parameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return XMLUtils.parseFloat3(value);
}
});
parameterMap.put(FileParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return value;
}
});
parameterMap.put(ChoiceParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
return value;
}
});
parameterMap.put(EnumListParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String value) {
EnumListParameter elp = (EnumListParameter) p;
return elp.getEnum(value);
}
});
parameterMap.put(BaseTypeParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String id) {
// find the object in the scene with the given id.
return levelNode.getChild(id);
}
});
parameterMap.put(ObjectParameter.class, new ParameterParser() {
public Object parseParameter(Object parent, Parameter p, String id) {
Object prefab = levelNode.getChild(id);
if (prefab == null) {
UnresolvedReferencePrefab ur = new UnresolvedReferencePrefab();
ur.setReference(parent, p, id);
prefab = ur;
}
return prefab;
}
});
parameterMap.put(ColorParameter.class,new ParameterParser(){
public Object parseParameter(Object parent, Parameter p, String id) {
// find the object in the scene with the given id.
return XMLUtils.parseColor(id);
}
});
}
/**
* Sets the root node for this importer. Can be used to find objects in the
* game.
*
* @param rootNode the rootnode to find.
*/
public void setRootNode(Node rootNode) {
this.levelNode = rootNode;
}
/**
* Parses a parameter and sets the value on the correct property of the
* component.
*
* @param prefab the prefab to set the property on.
* @param p the prefab component to set the property on.
* @param ct the component type of the PrefabComponent.
* @param id the id of the parameter.
* @param value the value of the parameter.
*/
@Override
public void parseAndSetParameter(PrefabComponent p, ComponentType ct, String id, String value) {
Parameter parameter = ct.findParameter(id);
if (parameter == null || value == null) {
return;
}
ParameterParser pp = parameterMap.get(parameter.getClass());
if (pp != null) {
Object oValue = pp.parseParameter(p, parameter, value);
if (oValue != null) {
PropertyReflector pr = ReflectionManager.getInstance().getPropertyReflector(p.getClass());
pr.invokeSetMethod(p, id, parameter.convertToObject(oValue));
}
}
}
/**
* Parses a parameter and sets the value on the correct property of the
* component.
*
* @param p the prefab component to set the property on.
* @param ct the component type of the PrefabComponent.
* @param id the id of the parameter.
* @param value the value of the parameter.
*/
@Override
public void parseAndSetParameter(Prefab p, String id, String value) {
Parameter parameter = p.getObjectType().findParameter(id);
if (parameter == null || value == null) {
return;
}
ParameterParser pp = parameterMap.get(parameter.getClass());
if (pp != null) {
Object oValue = pp.parseParameter(p, parameter, value);
if (oValue != null) {
parameter.invokeSet(p, oValue, false);
}
}
}
/**
* Parses a parameter and sets the value on the correct property of the
* component.
*
* @param p the prefab component to set the property on.
* @param ct the component type of the PrefabComponent.
* @param id the id of the parameter.
* @param value the value of the parameter.
*/
public void parseAndSetListParameter(Prefab prefab, String id, String value) {
Parameter parameter = prefab.getObjectType().findParameter(id);
if (parameter == null || value == null || !(parameter instanceof ListParameter)) {
return;
}
ListParameter lp = (ListParameter) parameter;
ParameterParser pp = parameterMap.get(lp.getBaseType().getClass());
if (pp != null) {
Object oValue = pp.parseParameter(prefab, parameter, value);
if (oValue != null) {
lp.addListItem(prefab, oValue);
}
}
}
/**
* Parses the defaultValue parameter and returns the result.
*
* @param p the parameter that has the information about the value to parse.
* @param defaultValue the value to parse.
* @return the parsed object.
*/
public Object parseParameter(Parameter p, String defaultValue) {
ParameterParser pp = parameterMap.get(p.getClass());
return pp.parseParameter(null, p, defaultValue);
}
}
| Fixed error with integer parser. | src/dae/io/readers/DefaultPrefabImporter.java | Fixed error with integer parser. |
|
Java | bsd-2-clause | 1bc4112baa7768ad1afcaabd12cbe5f7a9d57600 | 0 | imagej/imagej-legacy,imagej/imagej-legacy,imagej/imagej-legacy,imagej/imagej-legacy | /*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2017 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.legacy;
import java.net.URL;
import net.imagej.patcher.LegacyInjector;
import org.junit.Test;
import org.scijava.util.ClassUtils;
import org.scijava.util.FileUtils;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtField;
import javassist.CtMethod;
import javassist.NotFoundException;
import javassist.expr.Cast;
import javassist.expr.ConstructorCall;
import javassist.expr.ExprEditor;
import javassist.expr.FieldAccess;
import javassist.expr.Handler;
import javassist.expr.Instanceof;
import javassist.expr.MethodCall;
import javassist.expr.NewArray;
import javassist.expr.NewExpr;
/**
* Verifies that ImageJ 1.x classes are used only via the {@link IJ1Helper} class.
*
* @author Johannes Schindelin
*/
public class ImageJ1EncapsulationTest {
static {
try {
LegacyInjector.preinit();
}
catch (Throwable t) {
t.printStackTrace();
throw new RuntimeException("Got exception (see error log)");
}
}
@Test
public void verifyEncapsulation() throws Exception {
final ClassPool pool = ClassPool.getDefault();
final URL directory = ClassUtils.getLocation(IJ1Helper.class);
final int prefixLength = directory.toString().length();
for (final URL url : FileUtils.listContents(directory)) {
final String path = url.toString().substring(prefixLength);
if (!path.endsWith(".class")) continue;
final String className = path.substring(0, path.length() - 6).replace('/', '.');
if (className.startsWith(IJ1Helper.class.getName()) ||
/* TODO: At least some of them should not need to access ImageJ 1.x classes directly! */
className.startsWith(net.imagej.legacy.DefaultLegacyHooks.class.getName()) ||
className.startsWith(net.imagej.legacy.LegacyImageMap.class.getName()) ||
className.startsWith(net.imagej.legacy.OptionsSynchronizer.class.getName()) ||
className.startsWith(net.imagej.legacy.SwitchToModernMode.class.getName()) ||
className.startsWith(net.imagej.legacy.command.LegacyCommand.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.DatasetToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImageDisplayToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImagePlusToDatasetConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImagePlusToImageDisplayConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImageTitleToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ResultsTableColumnWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ResultsTableToGenericTableConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ResultsTableWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.StringToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractMaskPredicateToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractPolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractRoiToMaskPredicateConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractRoiUnwrapConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.BinaryCompositeMaskPredicateToShapeRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.DefaultRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.IJRealRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.IJRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RealMaskRealIntervalToImageRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RoiToMaskIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RoiUnwrappers.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ShapeRoiToMaskRealIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ShapeRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.BoxToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.BoxWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.RoiToBoxConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.RoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.WritableBoxToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.EllipsoidToOvalRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.EllipsoidWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.OvalRoiToEllipsoidConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.OvalRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.WritableEllipsoidToOvalRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.IJLineToLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.IJLineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.LineToIJLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.LineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.WritableLineToIJLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointMaskToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointMaskWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointRoiToRealPointCollectionConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.RealPointCollectionToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.RealPointCollectionWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.WritablePointMaskToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.WritableRealPointCollectionToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.Polygon2DToPolygonRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.Polygon2DWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.PolygonRoiToPolygon2DConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.PolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.UnmodifiablePolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.WritablePolygon2DToPolygonRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.IrregularPolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiToPolylineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiToRealMaskRealIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineToPolylineRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.UnmodifiablePolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.WritablePolylineToPolylineRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.display.AbstractImagePlusDisplayViewer.class.getName()) ||
className.startsWith(net.imagej.legacy.display.LegacyImageDisplayService.class.getName()) ||
className.startsWith(net.imagej.legacy.display.LegacyImageDisplayViewer.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.ActiveImagePlusPreprocessor.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.DefaultLegacyOpener.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.IJ1MacroEngine.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.LegacyInitializer.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.ResultsTablePreprocessor.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.RoiManagerPreprocessor.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.AbstractDisplayCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.AbstractImagePlusCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorDisplayCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorImagePlusCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorPixelHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorTableHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.CompositeHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.DefaultImageTranslator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.GrayDisplayCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.GrayImagePlusCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.GrayPixelHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.Harmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.LegacyUtils.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.MergedRgbVirtualStack.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.MetadataHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.NameHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.OverlayHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.PlaneHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.PositionHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ResultsTableHarmonizer.class.getName()))
{
continue;
}
try {
final CtClass clazz = pool.get(className);
clazz.instrument(new ImageJ1UsageTester());
} catch (final Exception e) {
throw new RuntimeException("Problem with class " + className, e);
}
}
}
private final class ImageJ1UsageTester extends ExprEditor {
private void test(final CtClass c) {
if (c != null && c.getName().startsWith("ij.")) {
throw new RuntimeException("ImageJ 1.x class used: " + c.getName());
}
}
@Override
public void edit(Cast c) {
try {
test(c.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(ConstructorCall c) {
try {
test(c.getConstructor().getDeclaringClass());
final CtConstructor c2 = c.getConstructor();
for (final CtClass c3 : c2.getExceptionTypes()) {
test(c3);
}
for (final CtClass c3 : c2.getParameterTypes()) {
test(c3);
}
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(FieldAccess f) {
try {
final CtField field = f.getField();
test(field.getDeclaringClass());
test(field.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(Handler h) {
try {
test(h.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(Instanceof i) {
try {
test(i.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(MethodCall m) {
try {
final CtMethod m2 = m.getMethod();
test(m2.getDeclaringClass());
test(m2.getReturnType());
for (final CtClass c2 : m2.getExceptionTypes()) {
test(c2);
}
for (final CtClass c2 : m2.getParameterTypes()) {
test(c2);
}
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(NewArray a) {
try {
test(a.getComponentType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(NewExpr e) {
try {
final CtConstructor c = e.getConstructor();
for (final CtClass c2 : c.getExceptionTypes()) {
test(c2);
}
for (final CtClass c2 : c.getParameterTypes()) {
test(c2);
}
}
catch (NotFoundException e2) {
throw new RuntimeException(e2);
}
}
}
}
| src/test/java/net/imagej/legacy/ImageJ1EncapsulationTest.java | /*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2017 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.legacy;
import java.net.URL;
import net.imagej.patcher.LegacyInjector;
import org.junit.Test;
import org.scijava.util.ClassUtils;
import org.scijava.util.FileUtils;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtField;
import javassist.CtMethod;
import javassist.NotFoundException;
import javassist.expr.Cast;
import javassist.expr.ConstructorCall;
import javassist.expr.ExprEditor;
import javassist.expr.FieldAccess;
import javassist.expr.Handler;
import javassist.expr.Instanceof;
import javassist.expr.MethodCall;
import javassist.expr.NewArray;
import javassist.expr.NewExpr;
/**
* Verifies that ImageJ 1.x classes are used only via the {@link IJ1Helper} class.
*
* @author Johannes Schindelin
*/
public class ImageJ1EncapsulationTest {
static {
try {
LegacyInjector.preinit();
}
catch (Throwable t) {
t.printStackTrace();
throw new RuntimeException("Got exception (see error log)");
}
}
@Test
public void verifyEncapsulation() throws Exception {
final ClassPool pool = ClassPool.getDefault();
final URL directory = ClassUtils.getLocation(IJ1Helper.class);
final int prefixLength = directory.toString().length();
for (final URL url : FileUtils.listContents(directory)) {
final String path = url.toString().substring(prefixLength);
if (!path.endsWith(".class")) continue;
final String className = path.substring(0, path.length() - 6).replace('/', '.');
if (className.startsWith(IJ1Helper.class.getName()) ||
/* TODO: At least some of them should not need to access ImageJ 1.x classes directly! */
className.startsWith(net.imagej.legacy.DefaultLegacyHooks.class.getName()) ||
className.startsWith(net.imagej.legacy.LegacyImageMap.class.getName()) ||
className.startsWith(net.imagej.legacy.OptionsSynchronizer.class.getName()) ||
className.startsWith(net.imagej.legacy.SwitchToModernMode.class.getName()) ||
className.startsWith(net.imagej.legacy.command.LegacyCommand.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.DatasetToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImageDisplayToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImagePlusToDatasetConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImagePlusToImageDisplayConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ImageTitleToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ResultsTableColumnWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ResultsTableToGenericTableConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.ResultsTableWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.StringToImagePlusConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractMaskPredicateToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractPolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractRoiToMaskPredicateConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.AbstractRoiUnwrapConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.BinaryCompositeMaskPredicateToShapeRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.DefaultRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.DefaultRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.IJRealRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.IJRealRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.IJRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.IJRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RealMaskRealIntervalToImageRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RealMaskRealIntervalToImageRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RoiToMaskIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.RoiUnwrappers.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ShapeRoiToMaskRealIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ShapeRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.BoxToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.BoxToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.BoxWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.RoiToBoxConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.RoiToBoxConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.RoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.box.WritableBoxToRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.EllipsoidToOvalRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.EllipsoidToOvalRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.EllipsoidWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.OvalRoiToEllipsoidConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.OvalRoiToEllipsoidConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.OvalRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.OvalRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.ellipsoid.WritableEllipsoidToOvalRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.IJLineToLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.IJLineToLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.IJLineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.IJLineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.LineToIJLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.LineToIJLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.LineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.line.WritableLineToIJLineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointMaskToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointMaskToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointMaskWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointRoiToRealPointCollectionConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointRoiToRealPointCollectionConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.PointRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.RealPointCollectionToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.RealPointCollectionToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.RealPointCollectionWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.WritablePointMaskToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.point.WritableRealPointCollectionToPointRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.Polygon2DToPolygonRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.Polygon2DToPolygonRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.Polygon2DWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.PolygonRoiToPolygon2DConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.PolygonRoiToPolygon2DConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.PolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.PolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.UnmodifiablePolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.UnmodifiablePolygonRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polygon2d.WritablePolygon2DToPolygonRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.IrregularPolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.IrregularPolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiToPolylineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiToPolylineConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiToRealMaskRealIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiToRealMaskRealIntervalConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineToPolylineRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineToPolylineRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.PolylineWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.UnmodifiablePolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.UnmodifiablePolylineRoiWrapper.class.getName()) ||
className.startsWith(net.imagej.legacy.convert.roi.polyline.WritablePolylineToPolylineRoiConverter.class.getName()) ||
className.startsWith(net.imagej.legacy.display.AbstractImagePlusDisplayViewer.class.getName()) ||
className.startsWith(net.imagej.legacy.display.LegacyImageDisplayService.class.getName()) ||
className.startsWith(net.imagej.legacy.display.LegacyImageDisplayViewer.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.ActiveImagePlusPreprocessor.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.DefaultLegacyOpener.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.IJ1MacroEngine.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.LegacyInitializer.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.ResultsTablePreprocessor.class.getName()) ||
className.startsWith(net.imagej.legacy.plugin.RoiManagerPreprocessor.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.AbstractDisplayCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.AbstractImagePlusCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorDisplayCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorImagePlusCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorPixelHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ColorTableHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.CompositeHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.DefaultImageTranslator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.GrayDisplayCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.GrayImagePlusCreator.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.GrayPixelHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.Harmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.LegacyUtils.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.MergedRgbVirtualStack.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.MetadataHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.NameHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.OverlayHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.PlaneHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.PositionHarmonizer.class.getName()) ||
className.startsWith(net.imagej.legacy.translate.ResultsTableHarmonizer.class.getName()))
{
continue;
}
try {
final CtClass clazz = pool.get(className);
clazz.instrument(new ImageJ1UsageTester());
} catch (final Exception e) {
throw new RuntimeException("Problem with class " + className, e);
}
}
}
private final class ImageJ1UsageTester extends ExprEditor {
private void test(final CtClass c) {
if (c != null && c.getName().startsWith("ij.")) {
throw new RuntimeException("ImageJ 1.x class used: " + c.getName());
}
}
@Override
public void edit(Cast c) {
try {
test(c.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(ConstructorCall c) {
try {
test(c.getConstructor().getDeclaringClass());
final CtConstructor c2 = c.getConstructor();
for (final CtClass c3 : c2.getExceptionTypes()) {
test(c3);
}
for (final CtClass c3 : c2.getParameterTypes()) {
test(c3);
}
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(FieldAccess f) {
try {
final CtField field = f.getField();
test(field.getDeclaringClass());
test(field.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(Handler h) {
try {
test(h.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(Instanceof i) {
try {
test(i.getType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(MethodCall m) {
try {
final CtMethod m2 = m.getMethod();
test(m2.getDeclaringClass());
test(m2.getReturnType());
for (final CtClass c2 : m2.getExceptionTypes()) {
test(c2);
}
for (final CtClass c2 : m2.getParameterTypes()) {
test(c2);
}
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(NewArray a) {
try {
test(a.getComponentType());
}
catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public void edit(NewExpr e) {
try {
final CtConstructor c = e.getConstructor();
for (final CtClass c2 : c.getExceptionTypes()) {
test(c2);
}
for (final CtClass c2 : c.getParameterTypes()) {
test(c2);
}
}
catch (NotFoundException e2) {
throw new RuntimeException(e2);
}
}
}
}
| ImageJ1EncapsulationTest: remove duplicates
| src/test/java/net/imagej/legacy/ImageJ1EncapsulationTest.java | ImageJ1EncapsulationTest: remove duplicates |
|
Java | bsd-2-clause | 16755ff0e644eba3d97b4bc35e047585f0f523e8 | 0 | chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio | /*
* Copyright (c) 2003-2007 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.scene.batch;
import java.io.IOException;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.logging.Logger;
import com.jme.bounding.BoundingVolume;
import com.jme.renderer.ColorRGBA;
import com.jme.renderer.Renderer;
import com.jme.scene.SceneElement;
import com.jme.scene.TriMesh;
import com.jme.scene.VBOInfo;
import com.jme.scene.state.RenderState;
import com.jme.util.export.InputCapsule;
import com.jme.util.export.JMEExporter;
import com.jme.util.export.JMEImporter;
import com.jme.util.export.OutputCapsule;
/**
* <code>SharedBatch</code> allows the sharing of data between multiple nodes.
* A provided TriMesh is used as the model for this node. This allows the user
* to place multiple copies of the same object throughout the scene without
* having to duplicate data. It should be known that any change to the provided
* target mesh will affect the appearance of this mesh, including animations.
* Secondly, the SharedBatch is read only. Any attempt to write to the mesh data
* via set* methods, will result in a warning being logged and nothing else. Any
* changes to the mesh should happened to the target mesh being shared. <br>
* If you plan to use collisions with a <code>SharedBatch</code> it is
* recommended that you disable passing of <code>updateCollisionTree</code>
* calls to the target mesh. This is to prevent multiple calls to the target's
* <code>updateCollisionTree</code> method, from different shared meshes.
* Instead of this method being called from the scenegraph, you can now invoke
* it directly on the target mesh, thus ensuring it will only be invoked once.
* <br>
* <b>Important:</b> It is highly recommended that the Target mesh is NOT
* placed into the scenegraph, as it's translation, rotation and scale are
* replaced by the shared meshes using it before they are rendered. <br>
* <b>Note:</b> Special thanks to Kevin Glass.
*
* @author Mark Powell
* @version $Id: SharedBatch.java,v 1.18 2007-08-27 20:38:32 nca Exp $
*/
public class SharedBatch extends TriangleBatch {
private static final Logger logger = Logger.getLogger(SharedBatch.class
.getName());
private static final long serialVersionUID = 1L;
private TriangleBatch target;
public SharedBatch() {
super();
defaultColor = null;
}
public SharedBatch(TriangleBatch target) {
this();
if ((target.getType() & SceneElement.SHAREDBATCH) != 0) {
setTarget(((SharedBatch) target).getTarget());
} else {
setTarget(target);
}
}
public int getType() {
return SceneElement.TRIANGLEBATCH | SceneElement.GEOMBATCH
| SceneElement.SHAREDBATCH;
}
/**
* <code>setTarget</code> sets the shared data mesh.
*
* @param target
* the TriMesh to share the data.
*/
public void setTarget(TriangleBatch target) {
this.target = target;
for (int i = 0; i < RenderState.RS_MAX_STATE; i++) {
RenderState renderState = this.target.getRenderState(i);
if (renderState != null) {
setRenderState(renderState);
}
}
setCullMode(target.getLocalCullMode());
setLightCombineMode(target.getLocalLightCombineMode());
setRenderQueueMode(target.getLocalRenderQueueMode());
setTextureCombineMode(target.getLocalTextureCombineMode());
setZOrder(target.getZOrder());
}
	/**
	 * <code>getTarget</code> returns the batch whose mesh data is shared by
	 * this object.
	 *
	 * @return the target batch being shared.
	 */
	public TriangleBatch getTarget() {
		return target;
	}
	/**
	 * <code>reconstruct</code> is not supported in SharedBatch; the request is
	 * logged at info level and otherwise ignored, because all mesh data lives
	 * in the shared target.
	 *
	 * @param vertices
	 *            the new vertices to use (ignored).
	 * @param normals
	 *            the new normals to use (ignored).
	 * @param colors
	 *            the new colors to use (ignored).
	 * @param textureCoords
	 *            the new texture coordinates to use, position 0 (ignored).
	 */
	public void reconstruct(FloatBuffer vertices, FloatBuffer normals,
			FloatBuffer colors, FloatBuffer textureCoords) {
		logger.info("SharedBatch will ignore reconstruct.");
	}
/**
* <code>setVBOInfo</code> is not supported in SharedBatch.
*/
public void setVBOInfo(VBOInfo info) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
	/**
	 * <code>getVBOInfo</code> returns the target batch's VBO info; all mesh
	 * data access is delegated to the shared target.
	 *
	 * @return the VBO info of the shared target batch.
	 */
	public VBOInfo getVBOInfo() {
		return target.getVBOInfo();
	}
/**
*
* <code>setSolidColor</code> is not supported by SharedBatch.
*
* @param color
* the color to set.
*/
public void setSolidColor(ColorRGBA color) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>setRandomColors</code> is not supported by SharedBatch.
*/
public void setRandomColors() {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
	/**
	 * <code>getVertexBuffer</code> returns the float buffer that contains the
	 * target geometry's vertex information.
	 *
	 * @return the target batch's vertex buffer.
	 */
	public FloatBuffer getVertexBuffer() {
		return target.getVertexBuffer();
	}
/**
* <code>setVertexBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new vertex buffer.
*/
public void setVertexBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
	/**
	 * <code>getNormalBuffer</code> retrieves the target geometry's normal
	 * information as a float buffer.
	 *
	 * @return the target batch's normal buffer.
	 */
	public FloatBuffer getNormalBuffer() {
		return target.getNormalBuffer();
	}
/**
* <code>setNormalBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new normal buffer.
*/
public void setNormalBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
	/**
	 * <code>getColorBuffer</code> retrieves the float buffer that contains
	 * the target geometry's color information.
	 *
	 * @return the target batch's color buffer.
	 */
	public FloatBuffer getColorBuffer() {
		return target.getColorBuffer();
	}
/**
* <code>setColorBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new color buffer.
*/
public void setColorBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
	/**
	 * <code>getIndexBuffer</code> retrieves the target's index array as an
	 * <code>IntBuffer</code>.
	 *
	 * @return the target batch's indices as an <code>IntBuffer</code>.
	 */
	public IntBuffer getIndexBuffer() {
		return target.getIndexBuffer();
	}
/**
*
* <code>setIndexBuffer</code> is not supported by SharedBatch.
*
* @param indices
* the index array as an IntBuffer.
*/
public void setIndexBuffer(IntBuffer indices) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
	/**
	 * Returns the number of vertices the target batch contains.
	 *
	 * @return the target's vertex count.
	 */
	public int getVertexCount() {
		return target.getVertexCount();
	}
	/**
	 * Returns the number of triangles the target batch contains.
	 *
	 * @return the current number of triangles of the shared target.
	 */
	public int getTriangleCount() {
		return target.getTriangleCount();
	}
	/**
	 * Delegates to the target batch to fill <code>storage</code> with the
	 * triangle at the given index.
	 *
	 * @param index
	 *            the triangle index to look up.
	 * @param storage
	 *            the array to fill with the triangle's vertex indices.
	 */
	public void getTriangle(int index, int[] storage) {
		target.getTriangle(index, storage);
	}
/**
*
* <code>copyTextureCoords</code> is not supported by SharedBatch.
*
* @param fromIndex
* the coordinates to copy.
* @param toIndex
* the texture unit to set them to.
*/
public void copyTextureCoords(int fromIndex, int toIndex) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>getTextureBuffers</code> retrieves the target geometry's texture
* information contained within a float buffer array.
*
* @return the float buffers that contain the target geometry's texture
* information.
*/
public ArrayList<FloatBuffer> getTextureBuffers() {
return target.getTextureBuffers();
}
/**
*
* <code>getTextureAsFloatBuffer</code> retrieves the texture buffer of a
* given texture unit.
*
* @param textureUnit
* the texture unit to check.
* @return the texture coordinates at the given texture unit.
*/
public FloatBuffer getTextureBuffer(int textureUnit) {
return target.getTextureBuffer(textureUnit);
}
/**
* <code>setTextureBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new vertex buffer.
*/
public void setTextureBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>setTextureBuffer</code> not supported by SharedBatch
*
* @param buff
* the new vertex buffer.
*/
public void setTextureBuffer(FloatBuffer buff, int position) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* clearBuffers is not supported by SharedBatch
*/
public void clearBuffers() {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
@Override
public void setTangentBuffer(FloatBuffer tangentBuf) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
@Override
public FloatBuffer getTangentBuffer() {
return target.getTangentBuffer();
}
public void setBinormalBuffer(FloatBuffer binormalBuf) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
@Override
public FloatBuffer getBinormalBuffer() {
return target.getBinormalBuffer();
}
/**
* <code>updateWorldBound</code> updates the bounding volume that contains
* this geometry. The location of the geometry is based on the location of
* all this node's parents.
*
* @see com.jme.scene.Spatial#updateWorldBound()
*/
public void updateWorldBound() {
if (target.getModelBound() != null) {
worldBound = target.getModelBound().transform(
parentGeom.getWorldRotation(),
parentGeom.getWorldTranslation(),
parentGeom.getWorldScale(), worldBound);
}
}
/**
* <code>setModelBound</code> sets the bounding object for this geometry.
*
* @param modelBound
* the bounding object for this geometry.
*/
public void setModelBound(BoundingVolume modelBound) {
target.setModelBound(modelBound);
}
/**
* <code>updateBound</code> recalculates the bounding object assigned to
* the geometry. This resets it parameters to adjust for any changes to the
* vertex information.
*
*/
public void updateModelBound() {
if (target.getModelBound() != null) {
target.updateModelBound();
updateWorldBound();
}
}
/**
* returns the model bound of the target object.
*/
public BoundingVolume getModelBound() {
return target.getModelBound();
}
/**
* draw renders the target mesh, at the translation, rotation and scale of
* this shared mesh.
*
* @see com.jme.scene.Spatial#draw(com.jme.renderer.Renderer)
*/
public void draw(Renderer r) {
// if this batch is not enabled, don't bother processing it.
if (!isEnabled()) {
return;
}
if (!r.isProcessingQueue()) {
if (r.checkAndAdd(this))
return;
}
target.parentGeom.getWorldTranslation().set(
parentGeom.getWorldTranslation());
target.parentGeom.getWorldRotation().set(parentGeom.getWorldRotation());
target.parentGeom.getWorldScale().set(parentGeom.getWorldScale());
target.setDefaultColor(getDefaultColor());
System.arraycopy(this.states, 0, target.states, 0, states.length);
r.draw(target);
}
public void write(JMEExporter e) throws IOException {
OutputCapsule capsule = e.getCapsule(this);
capsule.write(target, "target", null);
super.write(e);
}
private static TriMesh motherMesh = null;
public void read(JMEImporter e) throws IOException {
InputCapsule capsule = e.getCapsule(this);
target = (TriangleBatch) capsule.readSavable("target", null);
if (target.parentGeom == null) {
if (motherMesh == null) {
motherMesh = new TriMesh("mother");
motherMesh.clearBatches();
}
motherMesh.addBatch(target);
}
super.read(e);
}
@Override
public void lockMeshes(Renderer r) {
target.lockMeshes(r);
}
@Override
public boolean hasDirtyVertices() {
return target.hasDirtyVertices;
}
public String toString() {
if (target.parentGeom != null && parentGeom != null)
return target.parentGeom.getName() + ": SharedBatch "
+ parentGeom.getBatchIndex(this);
return "orphaned batch";
}
@Override
public ColorRGBA getDefaultColor() {
if (defaultColor == null) {
return target.getDefaultColor();
} else {
return defaultColor;
}
}
}
| src/com/jme/scene/batch/SharedBatch.java | /*
* Copyright (c) 2003-2007 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.scene.batch;
import java.io.IOException;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.logging.Logger;
import com.jme.bounding.BoundingVolume;
import com.jme.renderer.ColorRGBA;
import com.jme.renderer.Renderer;
import com.jme.scene.SceneElement;
import com.jme.scene.TriMesh;
import com.jme.scene.VBOInfo;
import com.jme.scene.state.RenderState;
import com.jme.util.export.InputCapsule;
import com.jme.util.export.JMEExporter;
import com.jme.util.export.JMEImporter;
import com.jme.util.export.OutputCapsule;
/**
* <code>SharedBatch</code> allows the sharing of data between multiple nodes.
* A provided TriMesh is used as the model for this node. This allows the user
* to place multiple copies of the same object throughout the scene without
* having to duplicate data. It should be known that any change to the provided
* target mesh will affect the appearance of this mesh, including animations.
* Secondly, the SharedBatch is read only. Any attempt to write to the mesh data
* via set* methods, will result in a warning being logged and nothing else. Any
* changes to the mesh should happened to the target mesh being shared. <br>
* If you plan to use collisions with a <code>SharedBatch</code> it is
* recommended that you disable passing of <code>updateCollisionTree</code>
* calls to the target mesh. This is to prevent multiple calls to the target's
* <code>updateCollisionTree</code> method, from different shared meshes.
* Instead of this method being called from the scenegraph, you can now invoke
* it directly on the target mesh, thus ensuring it will only be invoked once.
* <br>
* <b>Important:</b> It is highly recommended that the Target mesh is NOT
* placed into the scenegraph, as it's translation, rotation and scale are
* replaced by the shared meshes using it before they are rendered. <br>
* <b>Note:</b> Special thanks to Kevin Glass.
*
* @author Mark Powell
* @version $id$
*/
public class SharedBatch extends TriangleBatch {
private static final Logger logger = Logger.getLogger(SharedBatch.class
.getName());
private static final long serialVersionUID = 1L;
private TriangleBatch target;
public SharedBatch() {
super();
defaultColor = null;
}
public SharedBatch(TriangleBatch target) {
this();
if ((target.getType() & SceneElement.SHAREDBATCH) != 0) {
setTarget(((SharedBatch) target).getTarget());
} else {
setTarget(target);
}
}
public int getType() {
return SceneElement.TRIANGLEBATCH | SceneElement.GEOMBATCH
| SceneElement.SHAREDBATCH;
}
/**
* <code>setTarget</code> sets the shared data mesh.
*
* @param target
* the TriMesh to share the data.
*/
public void setTarget(TriangleBatch target) {
this.target = target;
for (int i = 0; i < RenderState.RS_MAX_STATE; i++) {
RenderState renderState = this.target.getRenderState(i);
if (renderState != null) {
setRenderState(renderState);
}
}
setCullMode(target.getLocalCullMode());
setLightCombineMode(target.getLocalLightCombineMode());
setRenderQueueMode(target.getLocalRenderQueueMode());
setTextureCombineMode(target.getLocalTextureCombineMode());
setZOrder(target.getZOrder());
}
/**
* <code>getTarget</code> returns the mesh that is being shared by this
* object.
*
* @return the mesh being shared.
*/
public TriangleBatch getTarget() {
return target;
}
/**
* <code>reconstruct</code> is not supported in SharedBatch.
*
* @param vertices
* the new vertices to use.
* @param normals
* the new normals to use.
* @param colors
* the new colors to use.
* @param textureCoords
* the new texture coordinates to use (position 0).
*/
public void reconstruct(FloatBuffer vertices, FloatBuffer normals,
FloatBuffer colors, FloatBuffer textureCoords) {
logger.info("SharedBatch will ignore reconstruct.");
}
/**
* <code>setVBOInfo</code> is not supported in SharedBatch.
*/
public void setVBOInfo(VBOInfo info) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>getVBOInfo</code> returns the target mesh's vbo info.
*/
public VBOInfo getVBOInfo() {
return target.getVBOInfo();
}
/**
*
* <code>setSolidColor</code> is not supported by SharedBatch.
*
* @param color
* the color to set.
*/
public void setSolidColor(ColorRGBA color) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>setRandomColors</code> is not supported by SharedBatch.
*/
public void setRandomColors() {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>getVertexBuffer</code> returns the float buffer that contains the
* target geometry's vertex information.
*
* @return the float buffer that contains the target geometry's vertex
* information.
*/
public FloatBuffer getVertexBuffer() {
return target.getVertexBuffer();
}
/**
* <code>setVertexBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new vertex buffer.
*/
public void setVertexBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>getNormalBuffer</code> retrieves the target geometry's normal
* information as a float buffer.
*
* @return the float buffer containing the target geometry information.
*/
public FloatBuffer getNormalBuffer() {
return target.getNormalBuffer();
}
/**
* <code>setNormalBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new normal buffer.
*/
public void setNormalBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>getColorBuffer</code> retrieves the float buffer that contains
* the target geometry's color information.
*
* @return the buffer that contains the target geometry's color information.
*/
public FloatBuffer getColorBuffer() {
return target.getColorBuffer();
}
/**
* <code>setColorBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new color buffer.
*/
public void setColorBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
*
* <code>getIndexAsBuffer</code> retrieves the target's indices array as
* an <code>IntBuffer</code>.
*
* @return the indices array as an <code>IntBuffer</code>.
*/
public IntBuffer getIndexBuffer() {
return target.getIndexBuffer();
}
/**
*
* <code>setIndexBuffer</code> is not supported by SharedBatch.
*
* @param indices
* the index array as an IntBuffer.
*/
public void setIndexBuffer(IntBuffer indices) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
public int getVertexCount() {
return target.getVertexCount();
}
/**
* Returns the number of triangles the target TriMesh contains.
*
* @return The current number of triangles.
*/
public int getTriangleCount() {
return target.getTriangleCount();
}
public void getTriangle(int index, int[] storage) {
target.getTriangle(index, storage);
}
/**
*
* <code>copyTextureCoords</code> is not supported by SharedBatch.
*
* @param fromIndex
* the coordinates to copy.
* @param toIndex
* the texture unit to set them to.
*/
public void copyTextureCoords(int fromIndex, int toIndex) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>getTextureBuffers</code> retrieves the target geometry's texture
* information contained within a float buffer array.
*
* @return the float buffers that contain the target geometry's texture
* information.
*/
public ArrayList<FloatBuffer> getTextureBuffers() {
return target.getTextureBuffers();
}
/**
*
* <code>getTextureAsFloatBuffer</code> retrieves the texture buffer of a
* given texture unit.
*
* @param textureUnit
* the texture unit to check.
* @return the texture coordinates at the given texture unit.
*/
public FloatBuffer getTextureBuffer(int textureUnit) {
return target.getTextureBuffer(textureUnit);
}
/**
* <code>setTextureBuffer</code> is not supported by SharedBatch.
*
* @param buff
* the new vertex buffer.
*/
public void setTextureBuffer(FloatBuffer buff) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* <code>setTextureBuffer</code> not supported by SharedBatch
*
* @param buff
* the new vertex buffer.
*/
public void setTextureBuffer(FloatBuffer buff, int position) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
/**
* clearBuffers is not supported by SharedBatch
*/
public void clearBuffers() {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
@Override
public void setTangentBuffer(FloatBuffer tangentBuf) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
@Override
public FloatBuffer getTangentBuffer() {
return target.getTangentBuffer();
}
public void setBinormalBuffer(FloatBuffer binormalBuf) {
logger.warning("SharedBatch does not allow the manipulation"
+ "of the the mesh data.");
}
@Override
public FloatBuffer getBinormalBuffer() {
return target.getBinormalBuffer();
}
/**
* <code>updateWorldBound</code> updates the bounding volume that contains
* this geometry. The location of the geometry is based on the location of
* all this node's parents.
*
* @see com.jme.scene.Spatial#updateWorldBound()
*/
public void updateWorldBound() {
if (target.getModelBound() != null) {
worldBound = target.getModelBound().transform(
parentGeom.getWorldRotation(),
parentGeom.getWorldTranslation(),
parentGeom.getWorldScale(), worldBound);
}
}
/**
* <code>setModelBound</code> sets the bounding object for this geometry.
*
* @param modelBound
* the bounding object for this geometry.
*/
public void setModelBound(BoundingVolume modelBound) {
target.setModelBound(modelBound);
}
/**
* <code>updateBound</code> recalculates the bounding object assigned to
* the geometry. This resets it parameters to adjust for any changes to the
* vertex information.
*
*/
public void updateModelBound() {
if (target.getModelBound() != null) {
target.updateModelBound();
updateWorldBound();
}
}
/**
* returns the model bound of the target object.
*/
public BoundingVolume getModelBound() {
return target.getModelBound();
}
/**
* draw renders the target mesh, at the translation, rotation and scale of
* this shared mesh.
*
* @see com.jme.scene.Spatial#draw(com.jme.renderer.Renderer)
*/
public void draw(Renderer r) {
// if this batch is not enabled, don't bother processing it.
if (!isEnabled()) {
return;
}
if (!r.isProcessingQueue()) {
if (r.checkAndAdd(this))
return;
}
target.parentGeom.getWorldTranslation().set(
parentGeom.getWorldTranslation());
target.parentGeom.getWorldRotation().set(parentGeom.getWorldRotation());
target.parentGeom.getWorldScale().set(parentGeom.getWorldScale());
target.setDefaultColor(getDefaultColor());
System.arraycopy(this.states, 0, target.states, 0, states.length);
r.draw(target);
}
public void write(JMEExporter e) throws IOException {
OutputCapsule capsule = e.getCapsule(this);
capsule.write(target, "target", null);
super.write(e);
}
private static TriMesh motherMesh = null;
public void read(JMEImporter e) throws IOException {
InputCapsule capsule = e.getCapsule(this);
target = (TriangleBatch) capsule.readSavable("target", null);
if (target.parentGeom == null) {
if (motherMesh == null) {
motherMesh = new TriMesh("mother");
motherMesh.clearBatches();
}
motherMesh.addBatch(target);
}
super.read(e);
}
@Override
public void lockMeshes(Renderer r) {
target.lockMeshes(r);
}
@Override
public boolean hasDirtyVertices() {
return target.hasDirtyVertices;
}
public String toString() {
if (target.parentGeom != null && parentGeom != null)
return target.parentGeom.getName() + ": SharedBatch "
+ parentGeom.getBatchIndex(this);
return "orphaned batch";
}
@Override
public ColorRGBA getDefaultColor() {
ColorRGBA changedDefaultColor = defaultColor;
if (changedDefaultColor == null) {
return super.getDefaultColor();
} else {
return changedDefaultColor;
}
}
}
| Issue #255: Fixed typo in shared batch.
git-svn-id: 5afc437a751a4ff2ced778146f5faadda0b504ab@3723 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
| src/com/jme/scene/batch/SharedBatch.java | Issue #255: Fixed typo in shared batch. |
|
Java | bsd-3-clause | bb062e2cdccbf811941254104f520759d956dabf | 0 | hispindia/dhis2-Core,msf-oca-his/dhis2-core,dhis2/dhis2-core,msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,hispindia/dhis2-Core,dhis2/dhis2-core,msf-oca-his/dhis2-core,dhis2/dhis2-core,hispindia/dhis2-Core,hispindia/dhis2-Core,hispindia/dhis2-Core,dhis2/dhis2-core,msf-oca-his/dhis2-core,dhis2/dhis2-core | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dxf2.pdfform;
import java.awt.*;
import java.io.IOException;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Collection;
import java.util.List;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.dataset.Section;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.option.Option;
import org.hisp.dhis.option.OptionService;
import org.hisp.dhis.option.OptionSet;
import org.hisp.dhis.period.CalendarPeriodType;
import org.hisp.dhis.period.FinancialAprilPeriodType;
import org.hisp.dhis.period.FinancialJulyPeriodType;
import org.hisp.dhis.period.FinancialOctoberPeriodType;
import org.hisp.dhis.period.MonthlyPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.period.QuarterlyPeriodType;
import org.hisp.dhis.period.SixMonthlyAprilPeriodType;
import org.hisp.dhis.period.SixMonthlyPeriodType;
import org.hisp.dhis.period.YearlyPeriodType;
import org.hisp.dhis.program.ProgramStage;
import org.hisp.dhis.program.ProgramStageSection;
import org.hisp.dhis.program.ProgramStageService;
import org.hisp.dhis.util.DateUtils;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import com.google.common.base.Preconditions;
import com.lowagie.text.Chunk;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.Element;
import com.lowagie.text.Font;
import com.lowagie.text.Paragraph;
import com.lowagie.text.Phrase;
import com.lowagie.text.Rectangle;
import com.lowagie.text.pdf.PdfAnnotation;
import com.lowagie.text.pdf.PdfAppearance;
import com.lowagie.text.pdf.PdfBorderDictionary;
import com.lowagie.text.pdf.PdfContentByte;
import com.lowagie.text.pdf.PdfFormField;
import com.lowagie.text.pdf.PdfPCell;
import com.lowagie.text.pdf.PdfPTable;
import com.lowagie.text.pdf.PdfWriter;
import com.lowagie.text.pdf.RadioCheckField;
import com.lowagie.text.pdf.TextField;
/**
* @author James Chang
*/
@Service( "pdfDataEntryFormService" )
@Scope( "prototype" )
public class DefaultPdfDataEntryFormService
implements PdfDataEntryFormService
{
private static final Color COLOR_BACKGROUDTEXTBOX = Color.getHSBColor( 0.0f, 0.0f, 0.961f );
private static final String TEXT_BLANK = " ";
private static final int TEXTBOXWIDTH_NUMBERTYPE = 35;
private static final int TEXTBOXWIDTH = 160;
private static final int PERIODRANGE_PREVYEARS = 1;
private static final int PERIODRANGE_FUTUREYEARS = 2;
private static final int PERIODRANGE_PREVYEARS_YEARLY = 5;
private static final int PERIODRANGE_FUTUREYEARS_YEARLY = 6;
private static final Integer MAX_OPTIONS_DISPLAYED = 30;
private static final Integer PROGRAM_FORM_ROW_NUMBER = 10;
// TODO this variable should not have class scope
private PdfFormFontSettings pdfFormFontSettings;
// TODO this variable should not have class scope
private I18nFormat format;
private DataSetService dataSetService;
private ProgramStageService programStageService;
private OptionService optionService;
public DefaultPdfDataEntryFormService( DataSetService dataSetService, ProgramStageService programStageService,
OptionService optionService )
{
this.dataSetService = dataSetService;
this.programStageService = programStageService;
this.optionService = optionService;
Preconditions.checkNotNull( dataSetService );
Preconditions.checkNotNull( programStageService );
Preconditions.checkNotNull( optionService );
}
// -------------------------------------------------------------------------
// PdfDataEntryFormService implementation
// -------------------------------------------------------------------------
@Override
public void generatePDFDataEntryForm( Document document, PdfWriter writer, String dataSetUid, int typeId,
Rectangle pageSize, PdfFormFontSettings pdfFormFontSettings, I18nFormat format )
{
try
{
this.pdfFormFontSettings = pdfFormFontSettings;
this.format = format;
document.setPageSize( pageSize );
document.open();
if ( typeId == PdfDataEntryFormUtil.DATATYPE_DATASET )
{
setDataSet_DocumentContent( document, writer, dataSetUid );
}
else if ( typeId == PdfDataEntryFormUtil.DATATYPE_PROGRAMSTAGE )
{
setProgramStage_DocumentContent( document, writer, dataSetUid );
}
}
catch ( Exception ex )
{
throw new RuntimeException( ex );
}
finally
{
document.close();
}
}
private void setDataSet_DocumentContent( Document document, PdfWriter writer, String dataSetUid )
throws Exception
{
DataSet dataSet = dataSetService.getDataSet( dataSetUid );
if ( dataSet == null )
{
throw new RuntimeException( "Error - DataSet not found for UID " + dataSetUid );
}
setDataSet_DocumentTopSection( document, dataSet );
document.add( Chunk.NEWLINE );
List<Period> periods = getPeriods_DataSet( dataSet.getPeriodType() );
PdfPTable mainTable = new PdfPTable( 1 ); // Table with 1 cell.
setMainTable( mainTable );
insertTable_OrgAndPeriod( mainTable, writer, periods );
insertTable_TextRow( writer, mainTable, TEXT_BLANK );
insertTable_DataSet( mainTable, writer, dataSet );
document.add( mainTable );
document.add( Chunk.NEWLINE );
document.add( Chunk.NEWLINE );
insertSaveAsButton( document, writer, PdfDataEntryFormUtil.LABELCODE_BUTTON_SAVEAS, dataSet.getDisplayName() );
}
private void setDataSet_DocumentTopSection( Document document, DataSet dataSet )
throws DocumentException
{
document.add( new Paragraph( dataSet.getDisplayName(), pdfFormFontSettings
.getFont( PdfFormFontSettings.FONTTYPE_TITLE ) ) );
document.add( new Paragraph( dataSet.getDisplayDescription(), pdfFormFontSettings
.getFont( PdfFormFontSettings.FONTTYPE_DESCRIPTION ) ) );
}
private List<Period> getPeriods_DataSet( PeriodType periodType )
throws ParseException
{
Period period = setPeriodDateRange( periodType );
return ((CalendarPeriodType) periodType).generatePeriods( period.getStartDate(), period.getEndDate() );
}
private void setMainTable( PdfPTable mainTable )
{
mainTable.setWidthPercentage( 100.0f );
mainTable.setHorizontalAlignment( Element.ALIGN_LEFT );
}
private void insertTable_DataSet( PdfPTable mainTable, PdfWriter writer, DataSet dataSet )
throws IOException,
DocumentException
{
Rectangle rectangle = new Rectangle( TEXTBOXWIDTH, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
if ( dataSet.getSections().size() > 0 )
{
for ( Section section : dataSet.getSections() )
{
insertTable_DataSetSections( mainTable, writer, rectangle, section.getDataElements(),
section.getDisplayName() );
}
}
else
{
insertTable_DataSetSections( mainTable, writer, rectangle, dataSet.getDataElements(), "" );
}
}
private void insertTable_DataSetSections( PdfPTable mainTable, PdfWriter writer, Rectangle rectangle,
Collection<DataElement> dataElements, String sectionName )
throws IOException,
DocumentException
{
boolean hasBorder = true;
// Add Section Name and Section Spacing
insertTable_TextRow( writer, mainTable, TEXT_BLANK );
if ( sectionName != null && !sectionName.isEmpty() )
{
insertTable_TextRow( writer, mainTable, sectionName,
pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_SECTIONHEADER ) );
}
// Create A Table To Add For Each Section
PdfPTable table = new PdfPTable( 2 );
table.setWidths( new int[] { 2, 1 } );
table.setWidthPercentage( 100.0f );
table.setHorizontalAlignment( Element.ALIGN_LEFT );
// For each DataElement and Category Combo of the dataElement, create
// row.
for ( DataElement dataElement : dataElements )
{
for ( CategoryOptionCombo categoryOptionCombo : dataElement.getSortedCategoryOptionCombos() )
{
String categoryOptionComboDisplayName = "";
// Hide Default category option combo name
if ( !categoryOptionCombo.isDefault() )
{
categoryOptionComboDisplayName = categoryOptionCombo.getDisplayName();
}
addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
dataElement.getFormNameFallback() + " " +
categoryOptionComboDisplayName,
Element.ALIGN_RIGHT );
String strFieldLabel = PdfDataEntryFormUtil.LABELCODE_DATAENTRYTEXTFIELD + dataElement.getUid() + "_"
+ categoryOptionCombo.getUid();
ValueType valueType = dataElement.getValueType();
// Yes Only case - render as check-box
if ( ValueType.TRUE_ONLY == valueType )
{
addCell_WithCheckBox( table, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel );
}
else if ( ValueType.BOOLEAN == valueType )
{
// Create Yes - true, No - false, Select..
String[] optionList = new String[] { "[No Value]", "Yes", "No" };
String[] valueList = new String[] { "", "true", "false" };
// addCell_WithRadioButton(table, writer, strFieldLabel);
addCell_WithDropDownListField( table, rectangle, writer,
PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel, optionList, valueList );
}
else if ( valueType.isNumeric() )
{
Rectangle rectNum = new Rectangle( TEXTBOXWIDTH_NUMBERTYPE,
PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
addCell_WithTextField( table, rectNum, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
strFieldLabel, PdfFieldCell.TYPE_TEXT_NUMBER );
}
else
{
addCell_WithTextField( table, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
strFieldLabel );
}
}
}
PdfPCell cell_withInnerTable = new PdfPCell( table );
cell_withInnerTable.setBorder( Rectangle.NO_BORDER );
mainTable.addCell( cell_withInnerTable );
}
private void setProgramStage_DocumentContent( Document document, PdfWriter writer, String programStageUid )
throws Exception
{
ProgramStage programStage = programStageService.getProgramStage( programStageUid );
if ( programStage == null )
{
throw new RuntimeException( "Error - ProgramStage not found for UID " + programStageUid );
}
else
{
// Get Rectangle with TextBox Width to be used
Rectangle rectangle = new Rectangle( 0, 0, TEXTBOXWIDTH, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
// Create Main Layout table and set the properties
PdfPTable mainTable = getProgramStageMainTable();
// Generate Period List for ProgramStage
List<Period> periods = getProgramStagePeriodList();
// Add Org Unit, Period, Hidden ProgramStageID Field
insertTable_OrgAndPeriod( mainTable, writer, periods );
insertTable_TextRow( writer, mainTable, TEXT_BLANK );
// Add ProgramStage Field - programStage.getId();
insertTable_HiddenValue( mainTable, rectangle, writer,
PdfDataEntryFormUtil.LABELCODE_PROGRAMSTAGEIDTEXTBOX, String.valueOf( programStage.getId() ) );
// Add ProgramStage Content to PDF - [The Main Section]
insertTable_ProgramStage( mainTable, writer, programStage );
// Add the mainTable to document
document.add( mainTable );
}
}
private void insertTable_ProgramStage( PdfPTable mainTable, PdfWriter writer, ProgramStage programStage )
throws IOException,
DocumentException
{
Rectangle rectangle = new Rectangle( TEXTBOXWIDTH, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
// Add Program Stage Sections
if ( programStage.getProgramStageSections().size() > 0 )
{
// Sectioned Ones
for ( ProgramStageSection section : programStage.getProgramStageSections() )
{
insertTable_ProgramStageSections( mainTable, rectangle, writer, section.getDataElements() );
}
}
else
{
// Default one
insertTable_ProgramStageSections( mainTable, rectangle, writer, programStage.getDataElements() );
}
}
// Renders one program stage section as a grid: a header row (Date + one
// column per data element), followed by PROGRAM_FORM_ROW_NUMBER entry rows.
private void insertTable_ProgramStageSections( PdfPTable mainTable, Rectangle rectangle, PdfWriter writer,
    Collection<DataElement> dataElements )
    throws IOException,
    DocumentException
{
    boolean hasBorder = false;

    // One column per data element, plus the leading date column and a
    // trailing hidden column used to set the row height.
    int colCount = dataElements.size() + 1 + 1;

    PdfPTable table = new PdfPTable( colCount );

    float totalWidth = 800f;
    float firstCellWidth_dateEntry = PdfDataEntryFormUtil.UNITSIZE_DEFAULT * 3;
    float lastCellWidth_hidden = PdfDataEntryFormUtil.UNITSIZE_DEFAULT;
    float dataElementCellWidth = (totalWidth - firstCellWidth_dateEntry - lastCellWidth_hidden)
        / dataElements.size();

    // Field rectangles: one sized for the date entry, one for the data
    // element entry fields.
    Rectangle rectangleDate = new Rectangle( 0, 0, PdfDataEntryFormUtil.UNITSIZE_DEFAULT * 2,
        PdfDataEntryFormUtil.UNITSIZE_DEFAULT );
    Rectangle rectangleDataElement = new Rectangle( 0, 0, dataElementCellWidth,
        PdfDataEntryFormUtil.UNITSIZE_DEFAULT );

    // Column widths: date | data elements... | hidden.
    float[] cellWidths = new float[colCount];
    cellWidths[0] = firstCellWidth_dateEntry;
    for ( int i = 1; i < colCount - 1; i++ )
    {
        cellWidths[i] = dataElementCellWidth;
    }
    cellWidths[colCount - 1] = lastCellWidth_hidden;
    table.setWidths( cellWidths );

    // Header row.
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), "Date", Element.ALIGN_CENTER );
    for ( DataElement dataElement : dataElements )
    {
        addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), dataElement.getFormNameFallback(),
            Element.ALIGN_CENTER );
    }
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), TEXT_BLANK, Element.ALIGN_CENTER );

    // Entry rows: each row repeats the date field plus one field per data element.
    for ( int rowNo = 1; rowNo <= PROGRAM_FORM_ROW_NUMBER; rowNo++ )
    {
        String strFieldDateLabel = PdfDataEntryFormUtil.LABELCODE_DATADATETEXTFIELD + Integer.toString( rowNo );
        addCell_WithTextField( table, rectangleDate, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
            strFieldDateLabel );

        for ( DataElement dataElement : dataElements )
        {
            OptionSet optionSet = dataElement.getOptionSet();

            // Field name encodes the data element id and the row number.
            String strFieldLabel = PdfDataEntryFormUtil.LABELCODE_DATAENTRYTEXTFIELD
                + Long.toString( dataElement.getId() )
                + "_" + Integer.toString( rowNo );

            if ( optionSet != null )
            {
                String query = ""; // Fetch all options.

                // TODO: This lookup repeats for every row - fetch the
                // options once per data element and reuse them.
                List<Option> options = optionService.getOptions( optionSet.getId(), query, MAX_OPTIONS_DISPLAYED );

                // BUG FIX: List<Option>.toArray( new String[0] ) throws
                // ArrayStoreException for any non-empty option list; map
                // the options to their names (titles) and codes (values).
                String[] optionNames = options.stream().map( Option::getName ).toArray( String[]::new );
                String[] optionCodes = options.stream().map( Option::getCode ).toArray( String[]::new );

                addCell_WithDropDownListField( table, rectangleDataElement, writer,
                    PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel, optionNames, optionCodes );
            }
            else
            {
                // NOTE: When rendering for a DataSet, the data element's
                // option set is not rendered; only for events is it shown
                // as a drop-down list.
                addCell_WithTextField( table, rectangleDataElement, writer,
                    PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel );
            }
        }

        addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), TEXT_BLANK, Element.ALIGN_LEFT );
    }

    PdfPCell cell_withInnerTable = new PdfPCell( table );
    cell_withInnerTable.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( cell_withInnerTable );
}
// Builds the list of monthly periods offered in the period drop-down of a
// program stage form; the date range comes from setPeriodDateRange.
private List<Period> getProgramStagePeriodList()
    throws ParseException
{
    PeriodType periodType = PeriodType.getPeriodTypeByName( MonthlyPeriodType.NAME );
    Period period = setPeriodDateRange( periodType );
    return ((CalendarPeriodType) periodType).generatePeriods( period.getStartDate(), period.getEndDate() );
}
// Creates the single-column outer layout table used by program stage
// forms: fixed 800pt width, left aligned.
private PdfPTable getProgramStageMainTable()
{
    PdfPTable layoutTable = new PdfPTable( 1 );
    layoutTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    layoutTable.setTotalWidth( 800f );
    layoutTable.setLockedWidth( true );
    return layoutTable;
}
// Adds the form header block: an organisation unit identifier text field
// and a period drop-down, wrapped in a borderless two-column inner table.
private void insertTable_OrgAndPeriod( PdfPTable mainTable, PdfWriter writer, List<Period> periods )
    throws IOException,
    DocumentException
{
    boolean hasBorder = false;
    float width = 220.0f;
    // Input text-box size.
    Rectangle rectangle = new Rectangle( width, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
    // Inner table: label column (1) and field column (3x wider).
    PdfPTable table = new PdfPTable( 2 );
    table.setWidths( new int[] { 1, 3 } );
    table.setHorizontalAlignment( Element.ALIGN_LEFT );
    // Org unit identifier label + text field.
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), "Organization unit identifier",
        Element.ALIGN_RIGHT );
    addCell_WithTextField( table, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
        PdfDataEntryFormUtil.LABELCODE_ORGID,
        PdfFieldCell.TYPE_TEXT_ORGUNIT );
    // Period label + drop-down (formatted titles shown, ISO dates exported).
    String[] periodsTitle = getPeriodTitles( periods, format );
    String[] periodsValue = getPeriodValues( periods );
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), "Period", Element.ALIGN_RIGHT );
    addCell_WithDropDownListField( table, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
        PdfDataEntryFormUtil.LABELCODE_PERIODID, periodsTitle, periodsValue );
    // Wrap the inner table in a borderless cell of the main table.
    PdfPCell cell_withInnerTable = new PdfPCell( table );
    cell_withInnerTable.setBorder( Rectangle.NO_BORDER );
    cell_withInnerTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    mainTable.addCell( cell_withInnerTable );
}
// Adds a single-cell inner table holding one pre-filled text field, used
// to carry a hidden value (e.g. the program stage id) inside the form.
private void insertTable_HiddenValue( PdfPTable mainTable, Rectangle rectangle, PdfWriter writer, String fieldName,
    String value )
    throws IOException,
    DocumentException
{
    boolean hasBorder = false;

    PdfPTable hiddenFieldTable = new PdfPTable( 1 );
    addCell_WithTextField( hiddenFieldTable, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
        fieldName, value );

    // Wrap in a borderless cell of the main table.
    PdfPCell wrapperCell = new PdfPCell( hiddenFieldTable );
    wrapperCell.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( wrapperCell );
}
// Convenience overload: text row rendered with the default body font.
private void insertTable_TextRow( PdfWriter writer, PdfPTable mainTable, String text )
{
    insertTable_TextRow( writer, mainTable, text,
        pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_BODY ) );
}
// Adds a left-aligned, borderless row of plain text to the main table,
// rendered with the given font.
private void insertTable_TextRow( PdfWriter writer, PdfPTable mainTable, String text, Font font )
{
    boolean hasBorder = false;

    PdfPTable textTable = new PdfPTable( 1 );
    textTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    addCell_Text( textTable, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), text, Element.ALIGN_LEFT, font );

    PdfPCell wrapperCell = new PdfPCell( textTable );
    wrapperCell.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( wrapperCell );
}
// Inserts a 'Save As' push button that saves the PDF under a name composed
// of the selected period, the org unit identifier and the data set name.
private void insertSaveAsButton( Document document, PdfWriter writer, String name, String dataSetName )
    throws DocumentException
{
    boolean hasBorder = false;

    // Button table: centered, 20% of the page width.
    PdfPTable tableButton = new PdfPTable( 1 );
    tableButton.setWidthPercentage( 20.0f );
    float buttonHeight = PdfDataEntryFormUtil.UNITSIZE_DEFAULT + 5;
    tableButton.setHorizontalAlignment( Element.ALIGN_CENTER );

    // Acrobat JavaScript: build the target file name from the period and org
    // unit field values, confirm with the user, then save next to the
    // currently open file.
    // BUG FIX: the previous script issued a second saveAs with an undefined
    // variable (cMyPath), raising a script error after the first save and
    // skipping the confirmation alert; consolidated into a single saveAs.
    String jsAction = "var newFileName = this.getField(\"" + PdfDataEntryFormUtil.LABELCODE_PERIODID
        + "\").value + ' ' + "
        + " this.getField(\"" + PdfDataEntryFormUtil.LABELCODE_ORGID + "\").value + ' ' + "
        + " \"" + dataSetName + ".pdf\";"
        + "var returnVal = app.alert('This will save this PDF file as ' + newFileName + '. Do you want to Continue?', 1, 2);"
        + "if(returnVal == 4) { "
        + " var aMyPath = this.path.split(\"/\");"
        + " aMyPath.pop();"
        + " aMyPath.push(newFileName);"
        + " this.saveAs({cPath:aMyPath.join(\"/\"), bPromptToOverwrite:true});"
        + " app.alert('File Saved.', 1);"
        + "} ";

    addCell_WithPushButtonField( tableButton, writer, PdfDataEntryFormUtil.getPdfPCell( buttonHeight,
        PdfDataEntryFormUtil.CELL_COLUMN_TYPE_ENTRYFIELD, hasBorder ), name, jsAction );

    document.add( tableButton );
}
// Adds a text cell rendered with the default body font.
private void addCell_Text( PdfPTable table, PdfPCell cell, String text, int horizontalAlignment )
{
    addCell_Text( table, cell, text, horizontalAlignment,
        pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_BODY ) );
}
// Adds a text cell with the given alignment and font.
private void addCell_Text( PdfPTable table, PdfPCell cell, String text, int horizontalAlignment, Font font )
{
    cell.setPhrase( new Phrase( text, font ) );
    cell.setHorizontalAlignment( horizontalAlignment );
    table.addCell( cell );
}
// Overload: default field cell type and empty initial value.
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName )
    throws IOException,
    DocumentException
{
    addCell_WithTextField( table, rect, writer, cell, strfldName, PdfFieldCell.TYPE_DEFAULT, "" );
}
// Overload: explicit field cell type, empty initial value.
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName,
    int fieldCellType )
    throws IOException,
    DocumentException
{
    addCell_WithTextField( table, rect, writer, cell, strfldName, fieldCellType, "" );
}
// Overload: default field cell type with a pre-filled value.
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName,
    String value )
    throws IOException,
    DocumentException
{
    addCell_WithTextField( table, rect, writer, cell, strfldName, PdfFieldCell.TYPE_DEFAULT, value );
}
// Adds a cell containing an AcroForm text field: solid black border, light
// grey background, right-aligned text in the standard body font.
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName,
    int fieldCellType, String value )
    throws IOException,
    DocumentException
{
    TextField field = new TextField( writer, rect, strfldName );
    field.setText( value );
    field.setAlignment( Element.ALIGN_RIGHT );
    field.setFont( pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_BODY ).getBaseFont() );
    field.setBackgroundColor( COLOR_BACKGROUDTEXTBOX );
    field.setBorderWidth( 1 );
    field.setBorderColor( Color.BLACK );
    field.setBorderStyle( PdfBorderDictionary.STYLE_SOLID );

    // Render the field through a cell event so it is placed inside the cell.
    cell.setCellEvent(
        new PdfFieldCell( field.getTextField(), rect.getWidth(), rect.getHeight(), fieldCellType, writer ) );
    table.addCell( cell );
}
// Adds a cell containing a combo-box (drop-down) form field; optionList
// holds the displayed titles, valueList the exported values.
private void addCell_WithDropDownListField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName, String[] optionList,
    String[] valueList )
    throws IOException,
    DocumentException
{
    TextField comboTemplate = new TextField( writer, rect, strfldName );
    comboTemplate.setChoices( optionList );
    comboTemplate.setChoiceExports( valueList );
    comboTemplate.setBackgroundColor( COLOR_BACKGROUDTEXTBOX );
    comboTemplate.setBorderWidth( 1 );
    comboTemplate.setBorderColor( Color.BLACK );
    comboTemplate.setBorderStyle( PdfBorderDictionary.STYLE_SOLID );

    PdfFormField dropDownField = comboTemplate.getComboField();
    cell.setCellEvent( new PdfFieldCell( dropDownField, rect.getWidth(), rect.getHeight(), writer ) );
    table.addCell( cell );
}
// Adds a cell containing a yes/no check-box form field with custom
// drawn on/off appearances.
// NOTE(review): PdfFieldCell.TPYEDEFINE_NAME carries an existing typo in
// its constant name (defined elsewhere); kept as-is for compatibility.
private void addCell_WithCheckBox( PdfPTable table, PdfWriter writer, PdfPCell cell, String strfldName )
    throws IOException,
    DocumentException
{
    float sizeDefault = PdfDataEntryFormUtil.UNITSIZE_DEFAULT;
    // "Yes" is the field name, "On" the export value of the checked state.
    RadioCheckField checkbox = new RadioCheckField( writer, new Rectangle( sizeDefault, sizeDefault ), "Yes",
        "On" );
    checkbox.setBorderWidth( 1 );
    checkbox.setBorderColor( Color.BLACK );
    PdfFormField checkboxfield = checkbox.getCheckField();
    checkboxfield.setFieldName( strfldName + "_" + PdfFieldCell.TPYEDEFINE_NAME + PdfFieldCell.TYPE_CHECKBOX );
    // Draw the custom on/off appearance streams on the direct content byte.
    setCheckboxAppearance( checkboxfield, writer.getDirectContent(), sizeDefault );
    cell.setCellEvent(
        new PdfFieldCell( checkboxfield, sizeDefault, sizeDefault, PdfFieldCell.TYPE_CHECKBOX, writer ) );
    table.addCell( cell );
}
// Adds a cell containing a Yes/No/null radio button group; currently
// unused (see @SuppressWarnings), retained for potential boolean rendering.
@SuppressWarnings( "unused" )
private void addCell_WithRadioButton( PdfPTable table, PdfWriter writer, PdfPCell cell, String strfldName )
{
    PdfFormField radiogroupField = PdfFormField.createRadioButton( writer, true );
    radiogroupField.setFieldName( strfldName );
    // Displayed options "Yes"/"No"/"null" export "true"/"false"/"".
    cell.setCellEvent( new PdfFieldCell( radiogroupField, new String[] { "Yes", "No", "null" }, new String[] {
        "true", "false", "" }, "", 30.0f, PdfDataEntryFormUtil.UNITSIZE_DEFAULT, PdfFieldCell.TYPE_RADIOBUTTON,
        writer ) );
    table.addCell( cell );
    writer.addAnnotation( radiogroupField );
}
// Adds a cell containing a push button that runs the given Acrobat
// JavaScript action when clicked.
// NOTE(review): the strfldName parameter is ignored - the button field
// name is hard-coded to "BTN_SAVEPDF"; confirm whether callers rely on it.
private void addCell_WithPushButtonField( PdfPTable table, PdfWriter writer, PdfPCell cell, String strfldName,
    String jsAction )
{
    cell.setCellEvent( new PdfFieldCell( null, jsAction, "BTN_SAVEPDF", "Save PDF", PdfFieldCell.TYPE_BUTTON,
        writer ) );
    table.addCell( cell );
}
// ISO date strings used as the export values of the period drop-down,
// one per period, in input order.
public String[] getPeriodValues( List<Period> periods )
{
    return periods.stream()
        .map( Period::getIsoDate )
        .toArray( String[]::new );
}
// Human-readable titles for the period drop-down, formatted as
// "<formatted period> - <start date> - <end date>".
public String[] getPeriodTitles( List<Period> periods, I18nFormat format )
{
    String[] titles = new String[periods.size()];

    for ( int i = 0; i < periods.size(); i++ )
    {
        Period period = periods.get( i );
        titles[i] = format.formatPeriod( period )
            + " - " + DateUtils.getMediumDateString( period.getStartDate() )
            + " - " + DateUtils.getMediumDateString( period.getEndDate() );
    }

    return titles;
}
// Builds a Period spanning the selectable date range for the given period
// type: a few years back to a few years ahead of the current year, with a
// wider window for quarterly-and-longer ("yearly style") period types.
private Period setPeriodDateRange( PeriodType periodType )
    throws ParseException
{
    String typeName = periodType.getName();

    // Quarterly and longer period types use the wider yearly window.
    boolean useYearlyRange = typeName.equals( QuarterlyPeriodType.NAME )
        || typeName.equals( SixMonthlyPeriodType.NAME )
        || typeName.equals( SixMonthlyAprilPeriodType.NAME )
        || typeName.equals( YearlyPeriodType.NAME )
        || typeName.equals( FinancialAprilPeriodType.NAME )
        || typeName.equals( FinancialJulyPeriodType.NAME )
        || typeName.equals( FinancialOctoberPeriodType.NAME );

    int currYear = Calendar.getInstance().get( Calendar.YEAR );
    int startYear = currYear - (useYearlyRange ? PERIODRANGE_PREVYEARS_YEARLY : PERIODRANGE_PREVYEARS);
    int endYear = currYear + (useYearlyRange ? PERIODRANGE_FUTUREYEARS_YEARLY : PERIODRANGE_FUTUREYEARS);

    // The range runs from January 1st of the start year to January 1st of
    // the end year.
    Period period = new Period();
    period.setStartDate( DateUtils.getMediumDate( startYear + "-01-01" ) );
    period.setEndDate( DateUtils.getMediumDate( endYear + "-01-01" ) );
    return period;
}
// Draws the "Off" (plain outlined square) and "On" (filled square crossed
// by an X) appearance streams for a check-box field.
private void setCheckboxAppearance( PdfFormField checkboxfield, PdfContentByte canvas, float width )
{
    // Unchecked state: outlined square only.
    PdfAppearance off = canvas.createAppearance( width + 2, width + 2 );
    off.rectangle( 1, 1, width, width );
    off.stroke();

    // Checked state: filled square with a diagonal cross.
    PdfAppearance on = canvas.createAppearance( width + 2, width + 2 );
    on.setRGBColorFill( 255, 128, 128 );
    on.rectangle( 1, 1, width, width );
    on.fillStroke();
    on.moveTo( 1, 1 );
    on.lineTo( width + 1, width + 1 );
    on.moveTo( 1, width + 1 );
    on.lineTo( width + 1, 1 );
    on.stroke();

    checkboxfield.setAppearance( PdfAnnotation.APPEARANCE_NORMAL, "Off", off );
    checkboxfield.setAppearance( PdfAnnotation.APPEARANCE_NORMAL, "On", on );
}
}
| dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/pdfform/DefaultPdfDataEntryFormService.java | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dxf2.pdfform;
import java.awt.*;
import java.io.IOException;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Collection;
import java.util.List;
import lombok.AllArgsConstructor;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.dataset.Section;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.option.Option;
import org.hisp.dhis.option.OptionService;
import org.hisp.dhis.option.OptionSet;
import org.hisp.dhis.period.CalendarPeriodType;
import org.hisp.dhis.period.FinancialAprilPeriodType;
import org.hisp.dhis.period.FinancialJulyPeriodType;
import org.hisp.dhis.period.FinancialOctoberPeriodType;
import org.hisp.dhis.period.MonthlyPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.period.QuarterlyPeriodType;
import org.hisp.dhis.period.SixMonthlyAprilPeriodType;
import org.hisp.dhis.period.SixMonthlyPeriodType;
import org.hisp.dhis.period.YearlyPeriodType;
import org.hisp.dhis.program.ProgramStage;
import org.hisp.dhis.program.ProgramStageSection;
import org.hisp.dhis.program.ProgramStageService;
import org.hisp.dhis.util.DateUtils;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import com.lowagie.text.Chunk;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.Element;
import com.lowagie.text.Font;
import com.lowagie.text.Paragraph;
import com.lowagie.text.Phrase;
import com.lowagie.text.Rectangle;
import com.lowagie.text.pdf.PdfAnnotation;
import com.lowagie.text.pdf.PdfAppearance;
import com.lowagie.text.pdf.PdfBorderDictionary;
import com.lowagie.text.pdf.PdfContentByte;
import com.lowagie.text.pdf.PdfFormField;
import com.lowagie.text.pdf.PdfPCell;
import com.lowagie.text.pdf.PdfPTable;
import com.lowagie.text.pdf.PdfWriter;
import com.lowagie.text.pdf.RadioCheckField;
import com.lowagie.text.pdf.TextField;
/**
* @author James Chang
*/
@AllArgsConstructor
@Service( "pdfDataEntryFormService" )
@Scope( "prototype" )
public class DefaultPdfDataEntryFormService
implements PdfDataEntryFormService
{
// Background color for all entry text boxes (light grey).
private static final Color COLOR_BACKGROUDTEXTBOX = Color.getHSBColor( 0.0f, 0.0f, 0.961f );
// Placeholder text used for blank spacer cells/rows.
private static final String TEXT_BLANK = " ";
// Width of numeric entry text boxes.
private static final int TEXTBOXWIDTH_NUMBERTYPE = 35;
// Default width of entry text boxes.
private static final int TEXTBOXWIDTH = 160;
// Period drop-down range: years before/after the current year.
private static final int PERIODRANGE_PREVYEARS = 1;
private static final int PERIODRANGE_FUTUREYEARS = 2;
// Wider range used for quarterly-and-longer period types.
private static final int PERIODRANGE_PREVYEARS_YEARLY = 5;
private static final int PERIODRANGE_FUTUREYEARS_YEARLY = 6;
// Maximum number of options rendered in a drop-down list.
private static final Integer MAX_OPTIONS_DISPLAYED = 30;
// Number of data entry rows rendered per program stage form.
private static final Integer PROGRAM_FORM_ROW_NUMBER = 10;
// Per-request state, set by generatePDFDataEntryForm (hence the
// prototype scope on this bean).
private PdfFormFontSettings pdfFormFontSettings;
private I18nFormat format;
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
private final DataSetService dataSetService;
private final ProgramStageService programStageService;
private final OptionService optionService;
// -------------------------------------------------------------------------
// PdfDataEntryFormService implementation
// -------------------------------------------------------------------------
// Entry point: renders either a data set or a program stage data entry
// form into the given Document, then always closes it.
@Override
public void generatePDFDataEntryForm( Document document, PdfWriter writer, String dataSetUid, int typeId,
    Rectangle pageSize, PdfFormFontSettings pdfFormFontSettings, I18nFormat format )
{
    try
    {
        // Stash per-request state used by the private helpers.
        this.pdfFormFontSettings = pdfFormFontSettings;
        this.format = format;
        document.setPageSize( pageSize );
        document.open();
        if ( typeId == PdfDataEntryFormUtil.DATATYPE_DATASET )
        {
            setDataSet_DocumentContent( document, writer, dataSetUid );
        }
        else if ( typeId == PdfDataEntryFormUtil.DATATYPE_PROGRAMSTAGE )
        {
            setProgramStage_DocumentContent( document, writer, dataSetUid );
        }
    }
    catch ( Exception ex )
    {
        // Boundary method: wrap any failure as unchecked.
        throw new RuntimeException( ex );
    }
    finally
    {
        // Always close so a partially written PDF is finalized.
        document.close();
    }
}
// Builds the full data set entry form: title section, org unit/period
// header, the data element entry tables, and a 'Save As' button.
private void setDataSet_DocumentContent( Document document, PdfWriter writer, String dataSetUid )
    throws Exception
{
    DataSet dataSet = dataSetService.getDataSet( dataSetUid );
    if ( dataSet == null )
    {
        throw new RuntimeException( "Error - DataSet not found for UID " + dataSetUid );
    }
    // Title and description.
    setDataSet_DocumentTopSection( document, dataSet );
    document.add( Chunk.NEWLINE );
    // Periods selectable for this data set's period type.
    List<Period> periods = getPeriods_DataSet( dataSet.getPeriodType() );
    PdfPTable mainTable = new PdfPTable( 1 ); // Table with 1 cell.
    setMainTable( mainTable );
    insertTable_OrgAndPeriod( mainTable, writer, periods );
    insertTable_TextRow( writer, mainTable, TEXT_BLANK );
    // Data element entry section(s).
    insertTable_DataSet( mainTable, writer, dataSet );
    document.add( mainTable );
    document.add( Chunk.NEWLINE );
    document.add( Chunk.NEWLINE );
    insertSaveAsButton( document, writer, PdfDataEntryFormUtil.LABELCODE_BUTTON_SAVEAS, dataSet.getDisplayName() );
}
// Adds the form header: data set title followed by its description, each
// in its configured font.
private void setDataSet_DocumentTopSection( Document document, DataSet dataSet )
    throws DocumentException
{
    Font titleFont = pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_TITLE );
    Font descriptionFont = pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_DESCRIPTION );

    document.add( new Paragraph( dataSet.getDisplayName(), titleFont ) );
    document.add( new Paragraph( dataSet.getDisplayDescription(), descriptionFont ) );
}
// Periods selectable for a data set form, generated from the data set's
// own period type over the range produced by setPeriodDateRange.
private List<Period> getPeriods_DataSet( PeriodType periodType )
    throws ParseException
{
    Period period = setPeriodDateRange( periodType );
    return ((CalendarPeriodType) periodType).generatePeriods( period.getStartDate(), period.getEndDate() );
}
// Configures the outer layout table: full page width, left aligned.
private void setMainTable( PdfPTable mainTable )
{
    mainTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    mainTable.setWidthPercentage( 100.0f );
}
// Renders the data element entry area of a data set form: one block per
// section, or a single unnamed block when the data set has no sections.
private void insertTable_DataSet( PdfPTable mainTable, PdfWriter writer, DataSet dataSet )
    throws IOException,
    DocumentException
{
    Rectangle entryFieldRect = new Rectangle( TEXTBOXWIDTH, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );

    if ( dataSet.getSections().isEmpty() )
    {
        // No sections: render all data elements under an empty section name.
        insertTable_DataSetSections( mainTable, writer, entryFieldRect, dataSet.getDataElements(), "" );
        return;
    }

    for ( Section section : dataSet.getSections() )
    {
        insertTable_DataSetSections( mainTable, writer, entryFieldRect, section.getDataElements(),
            section.getDisplayName() );
    }
}
// Renders one data set section: an optional section header row, then a
// two-column table with one row per (data element, category option combo)
// pair - label on the left, entry field on the right. The field widget is
// chosen from the data element's value type.
private void insertTable_DataSetSections( PdfPTable mainTable, PdfWriter writer, Rectangle rectangle,
    Collection<DataElement> dataElements, String sectionName )
    throws IOException,
    DocumentException
{
    boolean hasBorder = true;
    // Add section name and section spacing.
    insertTable_TextRow( writer, mainTable, TEXT_BLANK );
    if ( sectionName != null && !sectionName.isEmpty() )
    {
        insertTable_TextRow( writer, mainTable, sectionName,
            pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_SECTIONHEADER ) );
    }
    // Inner table for this section: label column twice the field column.
    PdfPTable table = new PdfPTable( 2 );
    table.setWidths( new int[] { 2, 1 } );
    table.setWidthPercentage( 100.0f );
    table.setHorizontalAlignment( Element.ALIGN_LEFT );
    // One row per data element and category option combo.
    for ( DataElement dataElement : dataElements )
    {
        for ( CategoryOptionCombo categoryOptionCombo : dataElement.getSortedCategoryOptionCombos() )
        {
            String categoryOptionComboDisplayName = "";
            // Hide the default category option combo name.
            if ( !categoryOptionCombo.isDefault() )
            {
                categoryOptionComboDisplayName = categoryOptionCombo.getDisplayName();
            }
            addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
                dataElement.getFormNameFallback() + " " +
                    categoryOptionComboDisplayName,
                Element.ALIGN_RIGHT );
            // Field name encodes the data element UID and combo UID.
            String strFieldLabel = PdfDataEntryFormUtil.LABELCODE_DATAENTRYTEXTFIELD + dataElement.getUid() + "_"
                + categoryOptionCombo.getUid();
            ValueType valueType = dataElement.getValueType();
            // TRUE_ONLY renders as a check-box.
            if ( ValueType.TRUE_ONLY == valueType )
            {
                addCell_WithCheckBox( table, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel );
            }
            else if ( ValueType.BOOLEAN == valueType )
            {
                // BOOLEAN renders as a [No Value]/Yes/No drop-down.
                String[] optionList = new String[] { "[No Value]", "Yes", "No" };
                String[] valueList = new String[] { "", "true", "false" };
                addCell_WithDropDownListField( table, rectangle, writer,
                    PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel, optionList, valueList );
            }
            else if ( valueType.isNumeric() )
            {
                // Numeric types get a narrower, number-formatted text field.
                Rectangle rectNum = new Rectangle( TEXTBOXWIDTH_NUMBERTYPE,
                    PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
                addCell_WithTextField( table, rectNum, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
                    strFieldLabel, PdfFieldCell.TYPE_TEXT_NUMBER );
            }
            else
            {
                // All other value types get a plain text field.
                addCell_WithTextField( table, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
                    strFieldLabel );
            }
        }
    }
    PdfPCell cell_withInnerTable = new PdfPCell( table );
    cell_withInnerTable.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( cell_withInnerTable );
}
// Builds the full program stage entry form: org unit/period header, a
// hidden field carrying the program stage id, then the entry grid.
private void setProgramStage_DocumentContent( Document document, PdfWriter writer, String programStageUid )
    throws Exception
{
    ProgramStage programStage = programStageService.getProgramStage( programStageUid );

    if ( programStage == null )
    {
        throw new RuntimeException( "Error - ProgramStage not found for UID " + programStageUid );
    }

    // Text-box rectangle used for the hidden program stage id field.
    Rectangle rectangle = new Rectangle( 0, 0, TEXTBOXWIDTH, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );

    // Outer layout table and the periods offered in the period drop-down.
    PdfPTable mainTable = getProgramStageMainTable();
    List<Period> periods = getProgramStagePeriodList();

    // Org unit and period selectors, then a blank spacer row.
    insertTable_OrgAndPeriod( mainTable, writer, periods );
    insertTable_TextRow( writer, mainTable, TEXT_BLANK );

    // Hidden field carrying the program stage id.
    insertTable_HiddenValue( mainTable, rectangle, writer,
        PdfDataEntryFormUtil.LABELCODE_PROGRAMSTAGEIDTEXTBOX, String.valueOf( programStage.getId() ) );

    // Main data entry section.
    insertTable_ProgramStage( mainTable, writer, programStage );

    document.add( mainTable );
}
// Renders the main data entry section of a program stage form: one table
// per configured section, or a single default table when no sections exist.
private void insertTable_ProgramStage( PdfPTable mainTable, PdfWriter writer, ProgramStage programStage )
    throws IOException,
    DocumentException
{
    // Text-box rectangle shared by all section renderings.
    Rectangle entryFieldRect = new Rectangle( TEXTBOXWIDTH, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );

    if ( programStage.getProgramStageSections().isEmpty() )
    {
        // No explicit sections: render all stage data elements as one default section.
        insertTable_ProgramStageSections( mainTable, entryFieldRect, writer, programStage.getDataElements() );
        return;
    }

    // Render each configured program stage section in turn.
    for ( ProgramStageSection section : programStage.getProgramStageSections() )
    {
        insertTable_ProgramStageSections( mainTable, entryFieldRect, writer, section.getDataElements() );
    }
}
// Renders one program stage section as a grid: a header row (Date + one
// column per data element), followed by PROGRAM_FORM_ROW_NUMBER entry rows.
private void insertTable_ProgramStageSections( PdfPTable mainTable, Rectangle rectangle, PdfWriter writer,
    Collection<DataElement> dataElements )
    throws IOException,
    DocumentException
{
    boolean hasBorder = false;

    // One column per data element, plus the leading date column and a
    // trailing hidden column used to set the row height.
    int colCount = dataElements.size() + 1 + 1;

    PdfPTable table = new PdfPTable( colCount );

    float totalWidth = 800f;
    float firstCellWidth_dateEntry = PdfDataEntryFormUtil.UNITSIZE_DEFAULT * 3;
    float lastCellWidth_hidden = PdfDataEntryFormUtil.UNITSIZE_DEFAULT;
    float dataElementCellWidth = (totalWidth - firstCellWidth_dateEntry - lastCellWidth_hidden)
        / dataElements.size();

    // Field rectangles: one sized for the date entry, one for the data
    // element entry fields.
    Rectangle rectangleDate = new Rectangle( 0, 0, PdfDataEntryFormUtil.UNITSIZE_DEFAULT * 2,
        PdfDataEntryFormUtil.UNITSIZE_DEFAULT );
    Rectangle rectangleDataElement = new Rectangle( 0, 0, dataElementCellWidth,
        PdfDataEntryFormUtil.UNITSIZE_DEFAULT );

    // Column widths: date | data elements... | hidden.
    float[] cellWidths = new float[colCount];
    cellWidths[0] = firstCellWidth_dateEntry;
    for ( int i = 1; i < colCount - 1; i++ )
    {
        cellWidths[i] = dataElementCellWidth;
    }
    cellWidths[colCount - 1] = lastCellWidth_hidden;
    table.setWidths( cellWidths );

    // Header row.
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), "Date", Element.ALIGN_CENTER );
    for ( DataElement dataElement : dataElements )
    {
        addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), dataElement.getFormNameFallback(),
            Element.ALIGN_CENTER );
    }
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), TEXT_BLANK, Element.ALIGN_CENTER );

    // Entry rows: each row repeats the date field plus one field per data element.
    for ( int rowNo = 1; rowNo <= PROGRAM_FORM_ROW_NUMBER; rowNo++ )
    {
        String strFieldDateLabel = PdfDataEntryFormUtil.LABELCODE_DATADATETEXTFIELD + Integer.toString( rowNo );
        addCell_WithTextField( table, rectangleDate, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
            strFieldDateLabel );

        for ( DataElement dataElement : dataElements )
        {
            OptionSet optionSet = dataElement.getOptionSet();

            // Field name encodes the data element id and the row number.
            String strFieldLabel = PdfDataEntryFormUtil.LABELCODE_DATAENTRYTEXTFIELD
                + Long.toString( dataElement.getId() )
                + "_" + Integer.toString( rowNo );

            if ( optionSet != null )
            {
                String query = ""; // Fetch all options.

                // TODO: This lookup repeats for every row - fetch the
                // options once per data element and reuse them.
                List<Option> options = optionService.getOptions( optionSet.getId(), query, MAX_OPTIONS_DISPLAYED );

                // BUG FIX: List<Option>.toArray( new String[0] ) throws
                // ArrayStoreException for any non-empty option list; map
                // the options to their names (titles) and codes (values).
                String[] optionNames = options.stream().map( Option::getName ).toArray( String[]::new );
                String[] optionCodes = options.stream().map( Option::getCode ).toArray( String[]::new );

                addCell_WithDropDownListField( table, rectangleDataElement, writer,
                    PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel, optionNames, optionCodes );
            }
            else
            {
                // NOTE: When rendering for a DataSet, the data element's
                // option set is not rendered; only for events is it shown
                // as a drop-down list.
                addCell_WithTextField( table, rectangleDataElement, writer,
                    PdfDataEntryFormUtil.getPdfPCell( hasBorder ), strFieldLabel );
            }
        }

        addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), TEXT_BLANK, Element.ALIGN_LEFT );
    }

    PdfPCell cell_withInnerTable = new PdfPCell( table );
    cell_withInnerTable.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( cell_withInnerTable );
}
// Builds the list of monthly periods offered in the period drop-down of a
// program stage form; the date range comes from setPeriodDateRange.
private List<Period> getProgramStagePeriodList()
    throws ParseException
{
    PeriodType periodType = PeriodType.getPeriodTypeByName( MonthlyPeriodType.NAME );
    Period period = setPeriodDateRange( periodType );
    return ((CalendarPeriodType) periodType).generatePeriods( period.getStartDate(), period.getEndDate() );
}
// Creates the single-column outer layout table used by program stage
// forms: fixed 800pt width, left aligned.
private PdfPTable getProgramStageMainTable()
{
    PdfPTable layoutTable = new PdfPTable( 1 );
    layoutTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    layoutTable.setTotalWidth( 800f );
    layoutTable.setLockedWidth( true );
    return layoutTable;
}
// Adds the form header block: an organisation unit identifier text field
// and a period drop-down, wrapped in a borderless two-column inner table.
private void insertTable_OrgAndPeriod( PdfPTable mainTable, PdfWriter writer, List<Period> periods )
    throws IOException,
    DocumentException
{
    boolean hasBorder = false;
    float width = 220.0f;
    // Input text-box size.
    Rectangle rectangle = new Rectangle( width, PdfDataEntryFormUtil.CONTENT_HEIGHT_DEFAULT );
    // Inner table: label column (1) and field column (3x wider).
    PdfPTable table = new PdfPTable( 2 );
    table.setWidths( new int[] { 1, 3 } );
    table.setHorizontalAlignment( Element.ALIGN_LEFT );
    // Org unit identifier label + text field.
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), "Organization unit identifier",
        Element.ALIGN_RIGHT );
    addCell_WithTextField( table, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
        PdfDataEntryFormUtil.LABELCODE_ORGID,
        PdfFieldCell.TYPE_TEXT_ORGUNIT );
    // Period label + drop-down (formatted titles shown, ISO dates exported).
    String[] periodsTitle = getPeriodTitles( periods, format );
    String[] periodsValue = getPeriodValues( periods );
    addCell_Text( table, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), "Period", Element.ALIGN_RIGHT );
    addCell_WithDropDownListField( table, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ),
        PdfDataEntryFormUtil.LABELCODE_PERIODID, periodsTitle, periodsValue );
    // Wrap the inner table in a borderless cell of the main table.
    PdfPCell cell_withInnerTable = new PdfPCell( table );
    cell_withInnerTable.setBorder( Rectangle.NO_BORDER );
    cell_withInnerTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    mainTable.addCell( cell_withInnerTable );
}
/**
 * Appends a row containing a single pre-filled text field (used to carry a
 * value through the form) to the main table.
 *
 * @param mainTable outer table to append to
 * @param rectangle size of the text field
 * @param writer    PDF writer that owns the generated field
 * @param fieldName PDF form field name
 * @param value     initial field value
 */
private void insertTable_HiddenValue( PdfPTable mainTable, Rectangle rectangle, PdfWriter writer, String fieldName,
    String value )
    throws IOException,
    DocumentException
{
    boolean hasBorder = false;
    // Single-column inner table holding the one field.
    PdfPTable innerTable = new PdfPTable( 1 );
    addCell_WithTextField( innerTable, rectangle, writer, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), fieldName,
        value );
    // Wrap the inner table in a borderless cell of the main table.
    PdfPCell wrapperCell = new PdfPCell( innerTable );
    wrapperCell.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( wrapperCell );
}
/**
 * Adds a plain text row to the main table using the default body font.
 * Convenience overload of {@link #insertTable_TextRow(PdfWriter, PdfPTable, String, Font)}.
 */
private void insertTable_TextRow( PdfWriter writer, PdfPTable mainTable, String text )
{
    insertTable_TextRow( writer, mainTable, text,
        pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_BODY ) );
}
/**
 * Adds a left-aligned text row in the given font to the main table.
 *
 * @param writer    PDF writer (kept for signature symmetry with siblings)
 * @param mainTable outer table to append to
 * @param text      text to render
 * @param font      font used for the text
 */
private void insertTable_TextRow( PdfWriter writer, PdfPTable mainTable, String text, Font font )
{
    boolean hasBorder = false;
    // Single-column, left-aligned inner table holding the text cell.
    PdfPTable textTable = new PdfPTable( 1 );
    textTable.setHorizontalAlignment( Element.ALIGN_LEFT );
    addCell_Text( textTable, PdfDataEntryFormUtil.getPdfPCell( hasBorder ), text, Element.ALIGN_LEFT, font );
    // Wrap in a borderless cell of the main table.
    PdfPCell wrapperCell = new PdfPCell( textTable );
    wrapperCell.setBorder( Rectangle.NO_BORDER );
    mainTable.addCell( wrapperCell );
}
// Insert 'Save As' button to document.
// The button's JavaScript derives a file name from the selected period and
// org unit, asks for confirmation, then saves the PDF next to the original
// file with an overwrite prompt.
// @SuppressWarnings( "unused" )
private void insertSaveAsButton( Document document, PdfWriter writer, String name, String dataSetName )
    throws DocumentException
{
    boolean hasBorder = false;
    // Button Table
    PdfPTable tableButton = new PdfPTable( 1 );
    tableButton.setWidthPercentage( 20.0f );
    float buttonHeight = PdfDataEntryFormUtil.UNITSIZE_DEFAULT + 5;
    tableButton.setHorizontalAlignment( Element.ALIGN_CENTER );
    // BUGFIX: the previous script called this.saveAs() twice and the second
    // call referenced the undefined variable cMyPath, which raised a
    // ReferenceError in Acrobat's JS engine before the 'File Saved' alert
    // could run. Save exactly once, with the joined path actually defined,
    // and let Acrobat prompt before overwriting.
    String jsAction = "var newFileName = this.getField(\"" + PdfDataEntryFormUtil.LABELCODE_PERIODID
        + "\").value + ' ' + "
        + " this.getField(\"" + PdfDataEntryFormUtil.LABELCODE_ORGID + "\").value + ' ' + "
        + " \"" + dataSetName + ".pdf\";"
        + "var returnVal = app.alert('This will save this PDF file as ' + newFileName + '. Do you want to Continue?', 1, 2);"
        + "if(returnVal == 4) { "
        + "  var aMyPath = this.path.split(\"/\");"
        + "  aMyPath.pop();"
        + "  aMyPath.push(newFileName);"
        + "  var cMyPath = aMyPath.join(\"/\");"
        + "  this.saveAs({cPath: cMyPath, bPromptToOverwrite: true});"
        + "  app.alert('File Saved.', 1);"
        + "} ";
    addCell_WithPushButtonField( tableButton, writer, PdfDataEntryFormUtil.getPdfPCell( buttonHeight,
        PdfDataEntryFormUtil.CELL_COLUMN_TYPE_ENTRYFIELD, hasBorder ), name, jsAction );
    document.add( tableButton );
}
/**
 * Adds a text cell using the default body font.
 * Convenience overload of {@link #addCell_Text(PdfPTable, PdfPCell, String, int, Font)}.
 */
private void addCell_Text( PdfPTable table, PdfPCell cell, String text, int horizontalAlignment )
{
    addCell_Text( table, cell, text, horizontalAlignment,
        pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_BODY ) );
}
/**
 * Puts the given text (in the given font and alignment) into the cell and
 * appends the cell to the table.
 */
private void addCell_Text( PdfPTable table, PdfPCell cell, String text, int horizontalAlignment, Font font )
{
    // The two setters are independent; order does not matter.
    cell.setPhrase( new Phrase( text, font ) );
    cell.setHorizontalAlignment( horizontalAlignment );
    table.addCell( cell ); // NOTE: could be reworked via a cellEvent later.
}
/**
 * Adds an empty, default-type text field cell.
 * Convenience overload delegating to the full variant with
 * {@code PdfFieldCell.TYPE_DEFAULT} and an empty initial value.
 */
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName )
    throws IOException,
    DocumentException
{
    addCell_WithTextField( table, rect, writer, cell, strfldName, PdfFieldCell.TYPE_DEFAULT, "" );
}
/**
 * Adds an empty text field cell of the given field-cell type.
 * Convenience overload delegating to the full variant with an empty value.
 */
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName,
    int fieldCellType )
    throws IOException,
    DocumentException
{
    addCell_WithTextField( table, rect, writer, cell, strfldName, fieldCellType, "" );
}
/**
 * Adds a default-type text field cell pre-filled with {@code value}.
 * Convenience overload delegating to the full variant with
 * {@code PdfFieldCell.TYPE_DEFAULT}.
 */
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName,
    String value )
    throws IOException,
    DocumentException
{
    addCell_WithTextField( table, rect, writer, cell, strfldName, PdfFieldCell.TYPE_DEFAULT, value );
}
/**
 * Adds a right-aligned, solid-bordered text field cell pre-filled with the
 * given value, rendered via a {@link PdfFieldCell} event.
 *
 * @param table         table to append the cell to
 * @param rect          size of the text field
 * @param writer        PDF writer that owns the field
 * @param cell          cell the field is placed into
 * @param strfldName    PDF form field name
 * @param fieldCellType PdfFieldCell rendering type
 * @param value         initial field value
 */
private void addCell_WithTextField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName,
    int fieldCellType, String value )
    throws IOException,
    DocumentException
{
    TextField field = new TextField( writer, rect, strfldName );
    // Appearance: body font, right aligned, solid black border over the
    // standard background colour.
    field.setFont( pdfFormFontSettings.getFont( PdfFormFontSettings.FONTTYPE_BODY ).getBaseFont() );
    field.setAlignment( Element.ALIGN_RIGHT );
    field.setText( value );
    field.setBackgroundColor( COLOR_BACKGROUDTEXTBOX );
    field.setBorderStyle( PdfBorderDictionary.STYLE_SOLID );
    field.setBorderColor( Color.BLACK );
    field.setBorderWidth( 1 );
    cell.setCellEvent(
        new PdfFieldCell( field.getTextField(), rect.getWidth(), rect.getHeight(), fieldCellType, writer ) );
    table.addCell( cell );
}
/**
 * Adds a combo-box (dropdown) cell. The visible entries come from
 * {@code optionList}; the exported (submitted) values from {@code valueList}.
 */
private void addCell_WithDropDownListField( PdfPTable table, Rectangle rect, PdfWriter writer, PdfPCell cell,
    String strfldName, String[] optionList,
    String[] valueList )
    throws IOException,
    DocumentException
{
    TextField combo = new TextField( writer, rect, strfldName );
    // Pair each displayed choice with its export value.
    combo.setChoiceExports( valueList );
    combo.setChoices( optionList );
    // Same border/background styling as the plain text fields.
    combo.setBackgroundColor( COLOR_BACKGROUDTEXTBOX );
    combo.setBorderStyle( PdfBorderDictionary.STYLE_SOLID );
    combo.setBorderColor( Color.BLACK );
    combo.setBorderWidth( 1 );
    PdfFormField comboField = combo.getComboField();
    cell.setCellEvent( new PdfFieldCell( comboField, rect.getWidth(), rect.getHeight(), writer ) );
    table.addCell( cell );
}
/**
 * Adds a square checkbox cell whose "On" export value is recognised by the
 * form importer through a type suffix encoded into the field name.
 */
private void addCell_WithCheckBox( PdfPTable table, PdfWriter writer, PdfPCell cell, String strfldName )
    throws IOException,
    DocumentException
{
    // Square box sized to the default form unit.
    float boxSize = PdfDataEntryFormUtil.UNITSIZE_DEFAULT;
    RadioCheckField check = new RadioCheckField( writer, new Rectangle( boxSize, boxSize ), "Yes",
        "On" );
    check.setBorderColor( Color.BLACK );
    check.setBorderWidth( 1 );
    PdfFormField checkField = check.getCheckField();
    // Encode the widget type into the field name so the importer can tell
    // checkboxes apart from plain text fields.
    checkField.setFieldName( strfldName + "_" + PdfFieldCell.TPYEDEFINE_NAME + PdfFieldCell.TYPE_CHECKBOX );
    setCheckboxAppearance( checkField, writer.getDirectContent(), boxSize );
    cell.setCellEvent(
        new PdfFieldCell( checkField, boxSize, boxSize, PdfFieldCell.TYPE_CHECKBOX, writer ) );
    table.addCell( cell );
}
/**
 * Adds a tri-state (Yes / No / unset) radio button group cell.
 * Currently unused but kept for completeness.
 */
@SuppressWarnings( "unused" )
private void addCell_WithRadioButton( PdfPTable table, PdfWriter writer, PdfPCell cell, String strfldName )
{
    PdfFormField radioGroup = PdfFormField.createRadioButton( writer, true );
    radioGroup.setFieldName( strfldName );
    // Display labels and the values each one exports on submit.
    String[] labels = new String[] { "Yes", "No", "null" };
    String[] exportValues = new String[] { "true", "false", "" };
    cell.setCellEvent( new PdfFieldCell( radioGroup, labels, exportValues, "", 30.0f,
        PdfDataEntryFormUtil.UNITSIZE_DEFAULT, PdfFieldCell.TYPE_RADIOBUTTON, writer ) );
    table.addCell( cell );
    writer.addAnnotation( radioGroup );
}
/**
 * Adds a push-button cell that runs the given JavaScript action on click.
 * <p>
 * NOTE(review): the {@code strfldName} parameter is currently ignored — the
 * field name is hard-coded to "BTN_SAVEPDF" and the caption to "Save PDF".
 * Presumably acceptable while the only caller is the Save-As button, but
 * confirm before reusing this helper for other buttons.
 */
private void addCell_WithPushButtonField( PdfPTable table, PdfWriter writer, PdfPCell cell, String strfldName,
    String jsAction )
{
    cell.setCellEvent( new PdfFieldCell( null, jsAction, "BTN_SAVEPDF", "Save PDF", PdfFieldCell.TYPE_BUTTON,
        writer ) );
    table.addCell( cell );
}
/**
 * Maps each period to its ISO date string (the values submitted by the
 * period dropdown).
 *
 * @param periods periods to convert; may be empty
 * @return ISO date strings, one per period, in the same order
 */
public String[] getPeriodValues( List<Period> periods )
{
    String[] values = new String[periods.size()];
    int index = 0;
    for ( Period period : periods )
    {
        values[index++] = period.getIsoDate();
    }
    return values;
}
/**
 * Builds the human-readable dropdown titles for each period:
 * "&lt;formatted period&gt; - &lt;start date&gt; - &lt;end date&gt;".
 *
 * @param periods periods to label; may be empty
 * @param format  i18n formatter for the period name
 * @return one title per period, in the same order
 */
public String[] getPeriodTitles( List<Period> periods, I18nFormat format )
{
    String[] titles = new String[periods.size()];
    for ( int index = 0; index < periods.size(); index++ )
    {
        Period period = periods.get( index );
        titles[index] = format.formatPeriod( period )
            + " - " + DateUtils.getMediumDateString( period.getStartDate() )
            + " - " + DateUtils.getMediumDateString( period.getEndDate() );
    }
    return titles;
}
/**
 * Computes the Jan-1 to Jan-1 date window from which selectable periods are
 * generated. Short period types get the default window; quarterly and
 * longer types get the wider "yearly" window.
 *
 * @param periodType period type the window is computed for
 * @return a Period whose start/end dates bound the window
 * @throws ParseException if the boundary dates cannot be parsed
 */
private Period setPeriodDateRange( PeriodType periodType )
    throws ParseException
{
    Calendar now = Calendar.getInstance();
    int thisYear = now.get( Calendar.YEAR );
    String typeName = periodType.getName();
    // Quarterly and longer types use the wider year range.
    boolean longPeriodType = typeName.equals( QuarterlyPeriodType.NAME )
        || typeName.equals( SixMonthlyPeriodType.NAME )
        || typeName.equals( SixMonthlyAprilPeriodType.NAME )
        || typeName.equals( YearlyPeriodType.NAME )
        || typeName.equals( FinancialAprilPeriodType.NAME )
        || typeName.equals( FinancialJulyPeriodType.NAME )
        || typeName.equals( FinancialOctoberPeriodType.NAME );
    int firstYear;
    int lastYear;
    if ( longPeriodType )
    {
        firstYear = thisYear - PERIODRANGE_PREVYEARS_YEARLY;
        lastYear = thisYear + PERIODRANGE_FUTUREYEARS_YEARLY;
    }
    else
    {
        firstYear = thisYear - PERIODRANGE_PREVYEARS;
        lastYear = thisYear + PERIODRANGE_FUTUREYEARS;
    }
    Period period = new Period();
    period.setStartDate( DateUtils.getMediumDate( firstYear + "-01-01" ) );
    period.setEndDate( DateUtils.getMediumDate( lastYear + "-01-01" ) );
    return period;
}
/**
 * Draws the two visual states of a checkbox field: "Off" is a plain
 * outlined box, "On" is a filled box crossed out with an X.
 *
 * @param checkboxfield field to attach the appearances to
 * @param canvas        content byte used to create the appearance streams
 * @param width         side length of the box
 */
private void setCheckboxAppearance( PdfFormField checkboxfield, PdfContentByte canvas, float width )
{
    // "Off": outlined empty square.
    PdfAppearance offState = canvas.createAppearance( width + 2, width + 2 );
    offState.rectangle( 1, 1, width, width );
    offState.stroke();
    // "On": filled square with a diagonal cross.
    PdfAppearance onState = canvas.createAppearance( width + 2, width + 2 );
    onState.setRGBColorFill( 255, 128, 128 );
    onState.rectangle( 1, 1, width, width );
    onState.fillStroke();
    onState.moveTo( 1, 1 );
    onState.lineTo( width + 1, width + 1 );
    onState.moveTo( 1, width + 1 );
    onState.lineTo( width + 1, 1 );
    onState.stroke();
    checkboxfield.setAppearance( PdfAnnotation.APPEARANCE_NORMAL, "Off", offState );
    checkboxfield.setAppearance( PdfAnnotation.APPEARANCE_NORMAL, "On", onState );
}
}
| chore: Use explicit constructor for wiring (#9738)
| dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/pdfform/DefaultPdfDataEntryFormService.java | chore: Use explicit constructor for wiring (#9738) |
|
Java | mit | 60fc42b76fc680a151b4438c4611c05f1de7a9b0 | 0 | bencvt/LibShapeDraw,bencvt/LibShapeDraw | import java.lang.reflect.Field;
import libshapedraw.ApiInfo;
import libshapedraw.MinecraftAccess;
import libshapedraw.internal.LSDController;
import libshapedraw.internal.LSDUtil;
import libshapedraw.primitive.ReadonlyVector3;
import libshapedraw.primitive.Vector3;
import net.minecraft.client.Minecraft;
/**
* Internal class. Client code using the API should ignore this.
* Rather, instantiate LibShapeDraw.
* <p>
* This is a ModLoader mod that links itself to the internal API Controller,
* providing it data and events from Minecraft. This class does the bare
* minimum of processing before passing these off to the Controller. I.e., this
* class is a thin wrapper for Minecraft used by LibShapeDraw.
* <p>
* As a wrapper, all direct interaction with Minecraft objects passes through
* this class, making the LibShapeDraw API itself clean and free of obfuscated
* code. (There is a single exception: LSDModDirectory.DIRECTORY.)
*/
public class mod_LibShapeDraw extends BaseMod implements MinecraftAccess {
    /**
     * The official Minecraft API will eventually provide standard entry points
     * for adding rendering hooks, but in the meantime we have to do some
     * hackish stuff to add our hook.
     * <p>
     * Option 1 is the naive, quick-and-dirty method: patch or proxy the
     * EntityRender class. However this class is already being modified by many
     * mods, including Optifine, ModLoader, and Forge. Introducing yet another
     * mutually incompatible mod is a poor choice. Compatibility is a key goal
     * of LibShapeDraw.
     * <p>
     * Option 2 is to use Forge's hooks. This is also not an acceptable option:
     * not everyone uses Forge. LibShapeDraw supports Forge but does not
     * require it.
     * <p>
     * Option 3 is to register a fake entity and add our render hook to it.
     * This is a valid, highly-compatible approach, used successfully by
     * several mods (including LibShapeDraw v1.0). However this has a key
     * drawback: entities are rendered before water, clouds, and other
     * elements. This can result in ugly graphical glitches when rendering
     * shapes near water.
     * <p>
     * Option 4, which is what this class implements, is an even more egregious
     * hack than option 1 or 3. The Profiler class is of course intended for
     * debugging, gathering metrics on how long it takes to render each
     * element.
     * <p>
     * As it happens, the point at which we want to insert our hook occurs just
     * before the player's hand is rendered. The profiler's context gets
     * switched at this point, giving us our hook! Furthermore, we're able to
     * proxy the Profiler class instead of modifying it directly, fulfilling
     * another one of LibShapeDraw's goals: no bytecode modification of vanilla
     * classes.
     * <p>
     * This doesn't guarantee compatibility with every mod: If another mod is
     * trying to proxy the Profiler class as well for some reason, Bad Things
     * might happen. If in EntityRender.renderWorld the call to
     * Profiler.endStartSection("hand") is removed by another mod patching that
     * class, Bad Things will definitely happen. We can and do check for these
     * cases, however.
     * <p>
     * Anyway, this method is a roundabout hack, but it works. It will almost
     * certainly break at some point when the rendering engine is overhauled in
     * Minecraft 1.5, but this is also when the official Minecraft API is
     * scheduled to finally be released.
     * <p>
     * The sooner the better.
     */
    // obf: Profiler
    public class ProfilerProxy extends jx {
        // obf: endStartSection
        @Override
        public void c(String sectionName) {
            // "hand" is the profiler section entered just before the player's
            // hand is rendered, i.e. after the rest of the world has been
            // drawn -- this is our render hook point.
            if (sectionName.equals("hand")) {
                float partialTick = getPartialTick();
                // obf: Minecraft.gameSettings, GameSettings.hideGUI, Minecraft.currentScreen
                controller.render(getPlayerCoords(partialTick), minecraft.y.R && minecraft.r == null);
                renderHeartbeat = true;
            }
            super.c(sectionName);
        }
    }
    private Minecraft minecraft;
    private ari timer; // obf: Timer
    /** Internal API controller that receives all events from this wrapper. */
    private LSDController controller;
    /** Set by ProfilerProxy every rendered frame; cleared every game tick. */
    private boolean renderHeartbeat;
    /** True once a broken render hook has been logged; prevents log spam. */
    private boolean renderHeartbroken;
    // Declared as Object to avoid referencing the obfuscated WorldClient type.
    private Object curWorld;
    private axb curPlayer; // obf: EntityClientPlayerMP
    /** null until the first respawn event; used to detect dimension changes. */
    private Integer curDimension;
    public mod_LibShapeDraw() {
        controller = LSDController.getInstance();
        controller.initialize(this);
    }
    @Override
    public String getName() {
        return ApiInfo.getName();
    }
    @Override
    public String getVersion() {
        return ApiInfo.getVersion();
    }
    /** ModLoader entry point: install the render hook and the game tick hook. */
    @Override
    public void load() {
        // obf: Minecraft.getMinecraft
        minecraft = Minecraft.x();
        // Get a reference to Minecraft's timer so we can get the partial
        // tick time for rendering (it's not passed to the profiler directly).
        // obf: Timer
        timer = (ari) LSDUtil.getFieldValue(LSDUtil.getFieldByType(Minecraft.class, ari.class, 0), minecraft);
        installRenderHook();
        ModLoader.setInGameHook(this, true, true); // game ticks only, not every render frame.
        LSDController.getLog().info(getClass().getName() + " loaded");
    }
    /** Use reflection to install the profiler proxy class. */
    private void installRenderHook() {
        // ProfilerProxy's superclass is the obfuscated Profiler class; going
        // through getSuperclass() avoids naming it directly here.
        Class<? super ProfilerProxy> profilerClass = ProfilerProxy.class.getSuperclass();
        Field fieldProfiler = LSDUtil.getFieldByType(Minecraft.class, profilerClass, 0);
        Object profilerOrig = LSDUtil.getFieldValue(fieldProfiler, minecraft);
        if (profilerOrig.getClass() != profilerClass) {
            // We probably overwrote some other mod's hook. :-(
            LSDController.getLog().warning("mod incompatibility detected: profiler already proxied!");
        }
        ProfilerProxy profilerProxy = new ProfilerProxy();
        LSDUtil.setFinalField(fieldProfiler, minecraft, profilerProxy);
        // Copy all field values from origProfiler to newProfiler
        for (Field f : profilerClass.getDeclaredFields()) {
            f.setAccessible(true);
            Object origValue = LSDUtil.getFieldValue(f, profilerOrig);
            LSDUtil.setFinalField(f, profilerProxy, origValue);
            LSDController.getLog().fine("copied profiler field " +
                    f + " = " + String.valueOf(origValue));
        }
    }
    /** Reset cached world/player/dimension state on each new server connection. */
    // obf: NetClientHandler
    @Override
    public void clientConnect(awq netClientHandler) {
        LSDController.getLog().info(getClass().getName() + " new server connection");
        curWorld = null;
        curPlayer = null;
        curDimension = null;
    }
    /**
     * ModLoader game tick hook: dispatches respawn and game-tick events to
     * the controller and verifies the render hook is still alive.
     */
    @Override
    public boolean onTickInGame(float partialTick, Minecraft minecraft) {
        ReadonlyVector3 playerCoords = getPlayerCoords(partialTick);
        // obf: Minecraft.theWorld, Minecraft.thePlayer
        if (curWorld != minecraft.e || curPlayer != minecraft.g) {
            curWorld = minecraft.e;
            curPlayer = minecraft.g;
            // Dispatch respawn event to Controller.
            int newDimension = curPlayer.ap; // obf: Entity.dimension
            controller.respawn(playerCoords,
                    curDimension == null,
                    curDimension == null || curDimension != newDimension);
            curDimension = newDimension;
        }
        // Dispatch game tick event to Controller.
        controller.gameTick(playerCoords);
        // Make sure our render hook is still working.
        // obf: skipRenderWorld
        if (!renderHeartbeat && !renderHeartbroken && !minecraft.w) {
            // Some other mod probably overwrote our hook. :-(
            LSDController.getLog().warning("mod incompatibility detected: render hook not working!");
            renderHeartbroken = true; // don't spam log
        }
        renderHeartbeat = false;
        return true;
    }
    /**
     * Get the player's current coordinates, adjusted for movement that occurs
     * between game ticks.
     */
    private ReadonlyVector3 getPlayerCoords(float partialTick) {
        if (curPlayer == null) {
            return Vector3.ZEROS;
        }
        // obf: Entity.prevPosX, Entity.prevPosY, Entity.prevPosZ, Entity.posX, Entity.posY, Entity.posZ
        return new Vector3(
                curPlayer.q + partialTick*(curPlayer.t - curPlayer.q),
                curPlayer.r + partialTick*(curPlayer.u - curPlayer.r),
                curPlayer.s + partialTick*(curPlayer.v - curPlayer.s));
    }
    // ====
    // MinecraftAccess implementation
    // ====
    @Override
    public MinecraftAccess startDrawing(int mode) {
        // obf: Tessellator.instance, Tessellator.startDrawing
        aza.a.b(mode);
        return this;
    }
    @Override
    public MinecraftAccess addVertex(double x, double y, double z) {
        // obf: Tessellator.instance, Tessellator.addVertex
        aza.a.a(x, y, z);
        return this;
    }
    @Override
    public MinecraftAccess addVertex(ReadonlyVector3 coords) {
        // obf: Tessellator.instance, Tessellator.addVertex
        aza.a.a(coords.getX(), coords.getY(), coords.getZ());
        return this;
    }
    @Override
    public MinecraftAccess finishDrawing() {
        // obf: Tessellator.instance, Tessellator.draw
        aza.a.a();
        return this;
    }
    @Override
    public MinecraftAccess enableStandardItemLighting() {
        // obf: RenderHelper.enableStandardItemLighting
        aqi.b();
        return this;
    }
    @Override
    public MinecraftAccess sendChatMessage(String message) {
        boolean visible = chatWindowExists();
        LSDController.getLog().info("sendChatMessage visible=" + visible + " message=" + message);
        if (visible) {
            // obf: Minecraft.ingameGUI, GuiIngame.getChatGUI, GuiNewChat.printChatMessage
            minecraft.v.b().a(message);
        }
        return this;
    }
    @Override
    public boolean chatWindowExists() {
        // obf: Minecraft.ingameGUI, GuiIngame.getChatGUI
        return minecraft != null && minecraft.v != null && minecraft.v.b() != null;
    }
    @Override
    public float getPartialTick() {
        // obf: Timer.renderPartialTicks
        return timer == null ? 0.0F : timer.c;
    }
}
| src/main/java/mod_LibShapeDraw.java | import java.lang.reflect.Field;
import libshapedraw.ApiInfo;
import libshapedraw.MinecraftAccess;
import libshapedraw.internal.LSDController;
import libshapedraw.internal.LSDUtil;
import libshapedraw.primitive.ReadonlyVector3;
import libshapedraw.primitive.Vector3;
import net.minecraft.client.Minecraft;
/**
* Internal class. Client code using the API should ignore this.
* Rather, instantiate LibShapeDraw.
* <p>
* This is a ModLoader mod that links itself to the internal API Controller,
* providing it data and events from Minecraft. This class does the bare
* minimum of processing before passing these off to the Controller. I.e., this
* class is a thin wrapper for Minecraft used by LibShapeDraw.
* <p>
* As a wrapper, all direct interaction with Minecraft objects passes through
* this class, making the LibShapeDraw API itself clean and free of obfuscated
* code. (There is a single exception: LSDModDirectory.DIRECTORY.)
*/
public class mod_LibShapeDraw extends BaseMod implements MinecraftAccess {
    /**
     * The official Minecraft API will eventually provide standard entry points
     * for adding rendering hooks, but in the meantime we have to do some
     * hackish stuff to add our hook.
     * <p>
     * Option 1 is the naive, quick-and-dirty method: patch or proxy the
     * EntityRender class. However this class is already being modified by many
     * mods, including Optifine, ModLoader, and Forge. Introducing yet another
     * mutually incompatible mod is a poor choice. Compatibility is a key goal
     * of LibShapeDraw.
     * <p>
     * Option 2 is to use Forge's hooks. This is also not an acceptable option:
     * not everyone uses Forge. LibShapeDraw supports Forge but does not
     * require it.
     * <p>
     * Option 3 is to register a fake entity and add our render hook to it.
     * This is a valid, highly-compatible approach, used successfully by
     * several mods (including LibShapeDraw v1.0). However this has a key
     * drawback: entities are rendered before water, clouds, and other
     * elements. This can result in ugly graphical glitches when rendering
     * shapes near water.
     * <p>
     * Option 4, which is what this class implements, is an even more egregious
     * hack than option 1 or 3. The Profiler class is of course intended for
     * debugging, gathering metrics on how long it takes to render each
     * element.
     * <p>
     * As it happens, the point at which we want to insert our hook occurs just
     * before the player's hand is rendered. The profiler's context gets
     * switched at this point, giving us our hook! Furthermore, we're able to
     * proxy the Profiler class instead of modifying it directly, fulfilling
     * another one of LibShapeDraw's goals: no bytecode modification of vanilla
     * classes.
     * <p>
     * This doesn't guarantee compatibility with every mod: If another mod is
     * trying to proxy the Profiler class as well for some reason, Bad Things
     * might happen. If in EntityRender.renderWorld the call to
     * Profiler.endStartSection("hand") is removed by another mod patching that
     * class, Bad Things will definitely happen. We can and do check for these
     * cases, however.
     * <p>
     * Anyway, this method is a roundabout hack, but it works. It will almost
     * certainly break at some point when the rendering engine is overhauled in
     * Minecraft 1.5, but this is also when the official Minecraft API is
     * scheduled to finally be released.
     * <p>
     * The sooner the better.
     */
    // obf: Profiler
    public class ProfilerProxy extends jx {
        // obf: endStartSection
        @Override
        public void c(String sectionName) {
            // "hand" is the profiler section entered just before the player's
            // hand is rendered, i.e. after the rest of the world has been
            // drawn -- this is our render hook point.
            if (sectionName.equals("hand")) {
                float partialTick = getPartialTick();
                // obf: Minecraft.gameSettings, GameSettings.hideGUI, Minecraft.currentScreen
                controller.render(getPlayerCoords(partialTick), minecraft.y.R && minecraft.r == null);
                renderHeartbeat = true;
            }
            super.c(sectionName);
        }
    }
    private Minecraft minecraft;
    private ari timer; // obf: Timer
    /** Internal API controller that receives all events from this wrapper. */
    private LSDController controller;
    /** Set by ProfilerProxy every rendered frame; cleared every game tick. */
    private boolean renderHeartbeat;
    /** True once a broken render hook has been logged; prevents log spam. */
    private boolean renderHeartbroken;
    private awy curWorld; // obf: WorldClient
    private axb curPlayer; // obf: EntityClientPlayerMP
    /** null until the first respawn event; used to detect dimension changes. */
    private Integer curDimension;
    public mod_LibShapeDraw() {
        controller = LSDController.getInstance();
        controller.initialize(this);
    }
    @Override
    public String getName() {
        return ApiInfo.getName();
    }
    @Override
    public String getVersion() {
        return ApiInfo.getVersion();
    }
    /** ModLoader entry point: install the render hook and the game tick hook. */
    @Override
    public void load() {
        // obf: Minecraft.getMinecraft
        minecraft = Minecraft.x();
        // Get a reference to Minecraft's timer so we can get the partial
        // tick time for rendering (it's not passed to the profiler directly).
        // obf: Timer
        timer = (ari) LSDUtil.getFieldValue(LSDUtil.getFieldByType(Minecraft.class, ari.class, 0), minecraft);
        installRenderHook();
        ModLoader.setInGameHook(this, true, true); // game ticks only, not every render frame.
        LSDController.getLog().info(getClass().getName() + " loaded");
    }
    /** Use reflection to install the profiler proxy class. */
    private void installRenderHook() {
        // obf: Profiler
        Field fieldProfiler = LSDUtil.getFieldByType(Minecraft.class, jx.class, 0);
        jx profilerOrig = (jx) LSDUtil.getFieldValue(fieldProfiler, minecraft);
        if (profilerOrig.getClass() != jx.class) {
            // We probably overwrote some other mod's hook. :-(
            LSDController.getLog().warning("mod incompatibility detected: profiler already proxied!");
        }
        ProfilerProxy profilerProxy = new ProfilerProxy();
        LSDUtil.setFinalField(fieldProfiler, minecraft, profilerProxy);
        // Copy all field values from origProfiler to newProfiler
        for (Field f : jx.class.getDeclaredFields()) {
            f.setAccessible(true);
            Object origValue = LSDUtil.getFieldValue(f, profilerOrig);
            LSDUtil.setFinalField(f, profilerProxy, origValue);
            LSDController.getLog().fine("copied profiler field " +
                    f + " = " + String.valueOf(origValue));
        }
    }
    /** Reset cached world/player/dimension state on each new server connection. */
    // obf: NetClientHandler
    @Override
    public void clientConnect(awq netClientHandler) {
        LSDController.getLog().info(getClass().getName() + " new server connection");
        curWorld = null;
        curPlayer = null;
        curDimension = null;
    }
    /**
     * ModLoader game tick hook: dispatches respawn and game-tick events to
     * the controller and verifies the render hook is still alive.
     */
    @Override
    public boolean onTickInGame(float partialTick, Minecraft minecraft) {
        ReadonlyVector3 playerCoords = getPlayerCoords(partialTick);
        if (curWorld != minecraft.e || curPlayer != minecraft.g) {
            curWorld = minecraft.e; // obf: Minecraft.theWorld
            curPlayer = minecraft.g; // obf: Minecraft.thePlayer
            // Dispatch respawn event to Controller.
            int newDimension = curPlayer.ap; // obf: Entity.dimension
            controller.respawn(playerCoords,
                    curDimension == null,
                    curDimension == null || curDimension != newDimension);
            curDimension = newDimension;
        }
        // Dispatch game tick event to Controller.
        controller.gameTick(playerCoords);
        // Make sure our render hook is still working.
        // obf: skipRenderWorld
        if (!renderHeartbeat && !renderHeartbroken && !minecraft.w) {
            // Some other mod probably overwrote our hook. :-(
            LSDController.getLog().warning("mod incompatibility detected: render hook not working!");
            renderHeartbroken = true; // don't spam log
        }
        renderHeartbeat = false;
        return true;
    }
    /**
     * Get the player's current coordinates, adjusted for movement that occurs
     * between game ticks.
     */
    private ReadonlyVector3 getPlayerCoords(float partialTick) {
        if (curPlayer == null) {
            return Vector3.ZEROS;
        }
        // obf: Entity.prevPosX, Entity.prevPosY, Entity.prevPosZ, Entity.posX, Entity.posY, Entity.posZ
        return new Vector3(
                curPlayer.q + partialTick*(curPlayer.t - curPlayer.q),
                curPlayer.r + partialTick*(curPlayer.u - curPlayer.r),
                curPlayer.s + partialTick*(curPlayer.v - curPlayer.s));
    }
    // ====
    // MinecraftAccess implementation
    // ====
    @Override
    public MinecraftAccess startDrawing(int mode) {
        // obf: Tessellator.instance, Tessellator.startDrawing
        aza.a.b(mode);
        return this;
    }
    @Override
    public MinecraftAccess addVertex(double x, double y, double z) {
        // obf: Tessellator.instance, Tessellator.addVertex
        aza.a.a(x, y, z);
        return this;
    }
    @Override
    public MinecraftAccess addVertex(ReadonlyVector3 coords) {
        // obf: Tessellator.instance, Tessellator.addVertex
        aza.a.a(coords.getX(), coords.getY(), coords.getZ());
        return this;
    }
    @Override
    public MinecraftAccess finishDrawing() {
        // obf: Tessellator.instance, Tessellator.draw
        aza.a.a();
        return this;
    }
    @Override
    public MinecraftAccess enableStandardItemLighting() {
        // obf: RenderHelper.enableStandardItemLighting
        aqi.b();
        return this;
    }
    @Override
    public MinecraftAccess sendChatMessage(String message) {
        boolean visible = chatWindowExists();
        LSDController.getLog().info("sendChatMessage visible=" + visible + " message=" + message);
        if (visible) {
            // obf: Minecraft.ingameGUI, GuiIngame.getChatGUI, GuiNewChat.printChatMessage
            minecraft.v.b().a(message);
        }
        return this;
    }
    @Override
    public boolean chatWindowExists() {
        // obf: Minecraft.ingameGUI, GuiIngame.getChatGUI
        return minecraft != null && minecraft.v != null && minecraft.v.b() != null;
    }
    @Override
    public float getPartialTick() {
        // obf: Timer.renderPartialTicks
        return timer == null ? 0.0F : timer.c;
    }
}
| refactor to make fewer references to obfuscated class names
| src/main/java/mod_LibShapeDraw.java | refactor to make fewer references to obfuscated class names |
|
Java | mit | 80dffdab05cc955244ded04614107456d9b914dd | 0 | KMU-bank/KMU-Bank | package view;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Scanner;
import client.Client;
/**
 * Console view for the banking demo: prints menus/prompts (Korean UI text)
 * and reads the few inputs that are gathered directly by the view.
 */
public class View {
    // Shared scanner for all console input handled by this view.
    static Scanner sc = new Scanner(System.in);
    /** Prints the ASCII-art bank title banner. */
    public void Title(){
        System.out.println("***************************************************************************");
        System.out.println("* *");
        System.out.println("* ■■■■■■ ■ ■■ ■ ■ ■ ■ *");
        System.out.println("* ■ ■ ■ ■■ ■■ ■■ ■ ■ *");
        System.out.println("* ■ ■ ■ ■■ ■ ■ ■ ■ ■ ■ *");
        System.out.println("* ■■■■■■ ■■■■ ■■ ■ ■ ■■ ■ ■ ■ ■ ■ ■ *");
        System.out.println("* ■ ■ ■ ■ ■ ■ ■ ■■ ■■ ■ ■ ■ ■ ■ ■ *");
        System.out.println("* ■ ■ ■ ■ ■ ■ ■ ■ ■■ ■ ■ ■ ■ ■■ ■■ *");
        System.out.println("* ■■■■■■ ■■■■ ■ ■ ■■ ■ ■■ ■ ■ ■ ■■■■■ *");
        System.out.println("* *");
        System.out.println("***************************************************************************");
    }
    //------------------------------------------------------------------------------ Start screen
    /** Prints the start menu: 1) create user, 2) delete user, 3) select user. */
    public void Start_Page(){
        System.out.println(" 1. 사용자 생성");
        System.out.println(" 2. 사용자 삭제");
        System.out.println(" 3. 사용자 선택");
        System.out.print("Select Number : ");
    }
    /** Prints every registered client as "index. name" for slots 1..99. */
    public void User_List(HashMap<Integer, Client> clients){
        for(int i=1; i<100; i++)
            if(clients.get(i) != null)
                System.out.println(i + ". " + clients.get(i).getName());
    }
    /** Shows the user list followed by a selection prompt (0 = back). */
    public void User_Select(HashMap<Integer, Client> clients){
        System.out.println(" 사용자 리스트");
        User_List(clients);
        System.out.println("0. 뒤로가기");
        System.out.print(" 사용자 선택 : ");
    }
    /** Prompts for the name of the user to create. */
    public void User_Create(){
        System.out.println(" 사용자 이름을 입력해주세요.");
        System.out.print(" 사용자 이름 : ");
    }
    //------------------------------------------------------------------------------ After user selection
    /** Shows the user list and prompts for the user to delete. */
    public void User_Delete(HashMap<Integer, Client> clients){
        System.out.println(" 사용자 리스트");
        User_List(clients);
        System.out.print(" 삭제할 사용자 : ");
        System.out.print("Select Number : ");
    }
    /** Prompts for a bank choice: 1) KB, 2) NH, 3) SH. */
    public void Bank_Select(){
        System.out.println(" 은행을 선택해주세요");
        System.out.println(" 1. KB");
        System.out.println(" 2. NH");
        System.out.println(" 3. SH");
        System.out.print("Select Number : ");
    }
    /** Prints the main banking menu (deposit/withdraw/transfer/inquiry/loan/...). */
    public void Banking(){
        System.out.println(" 1. 입금");
        System.out.println(" 2. 출금");
        System.out.println(" 3. 이체");
        System.out.println(" 4. 조회");
        System.out.println("------------------------");
        System.out.println(" 5. 대출");
        System.out.println(" 6. 대출 상환");
        System.out.println("------------------------");
        System.out.println(" 7. 계좌 삭제");
        System.out.println("------------------------");
        System.out.println(" 8. 1년 후...");
        System.out.print("Select Number : ");
    }
    //------------------------------------------------------------------------------ Menu helpers
    /** Prints the current account balance. */
    public void currentBalance(int balance){
        System.out.println(" 현재 잔고 : " + balance);
    }
    /** Prints the current outstanding loan amount. */
    public void currentDebt(int debt){
        System.out.println("------------------------------");
        System.out.println(" 현재 대출 금액 : " + (debt));
    }
    /** Prompts for the amount to deposit. */
    public void Deposit(){
        System.out.print(" 입금할 금액 : ");
    }
    /** Prompts for the amount to withdraw. */
    public void Withdraw(){
        System.out.print(" 출금할 금액 : ");
    }
    /**
     * Prompts for a transfer and reads the inputs directly.
     *
     * @return a 2-element array: [0] target account number (String),
     *         [1] amount (Integer boxed from nextInt)
     */
    public Object[] Transfer(){
        Object[] AccountNumberNMoney = new Object[2];
        System.out.print(" 계좌번호를 입력해 주세요.\n 계좌번호 : ");
        AccountNumberNMoney[0] = sc.next();
        System.out.println(" 보내실 금액을 입력해주세요.");
        AccountNumberNMoney[1] = sc.nextInt();
        return AccountNumberNMoney;
    }
    /** Prints the transaction history, one line per entry. */
    public void State_List(LinkedList<String> stateList){
        System.out.println(" 거래 내역을 출력합니다.");
        for(int i =0; i<stateList.size(); i++){
            System.out.println(stateList.get(i));
        }
    }
    /** Prompts for the loan amount. */
    public void Loan(){
        System.out.println(" 대출하실 금액을 입력해 주시기 바랍니다.");
        System.out.print(" 대출 금액 : ");
    }
    /** Prompts for the repayment amount. */
    public void Repay(){
        System.out.println(" 상환하실 금액을 입력해 주시기 바랍니다.");
        System.out.print(" 상환 금액 : ");
    }
    /** Announces the one-year time leap. */
    public void Time_Leap(){
        System.out.println(" 1년 후로 이동합니다. (푸슝---~=★!)");
    }
    //------------------------------------------------------------------------------ Account helpers
    /** Asks whether to create an account when none exists (y/n). */
    public void no_Account(){
        System.out.print(" 계좌가 존재하지 않습니다. 계좌를 생성하시겠습니까? ( y / n ) : ");
    }
    /** Confirms that an account was created. */
    public void Acount_Create(){
        System.out.println(" 계좌가 생성되었습니다.");
    }
    /** Confirms account deletion and reports the auto-withdrawn balance. */
    public void Acount_Delete(int bal){
        System.out.println(" 계좌를 삭제합니다.");
        System.out.println(" 계좌 내의 잔금 " + bal + "원이 자동으로 출금되었습니다.");
    }
}
| src/view/View.java | package view;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Scanner;
import client.Client;
public class View {
static Scanner sc = new Scanner(System.in);
public void Title(){
System.out.println("***************************************************************************");
System.out.println("* *");
System.out.println("* ■■■■■■ ■ ■■ ■ ■ ■ ■ *");
System.out.println("* ■ ■ ■ ■■ ■■ ■■ ■ ■ *");
System.out.println("* ■ ■ ■ ■■ ■ ■ ■ ■ ■ ■ *");
System.out.println("* ■■■■■■ ■■■■ ■■ ■ ■ ■■ ■ ■ ■ ■ ■ ■ *");
System.out.println("* ■ ■ ■ ■ ■ ■ ■ ■■ ■■ ■ ■ ■ ■ ■ ■ *");
System.out.println("* ■ ■ ■ ■ ■ ■ ■ ■ ■■ ■ ■ ■ ■ ■■ ■■ *");
System.out.println("* ■■■■■■ ■■■■ ■ ■ ■■ ■ ■■ ■ ■ ■ ■■■■■ *");
System.out.println("* *");
System.out.println("***************************************************************************");
}
//------------------------------------------------------------------------------ 시작 화면
public void Start_Page(){
System.out.println(" 1. 사용자 생성");
System.out.println(" 2. 사용자 삭제");
System.out.println(" 3. 사용자 선택");
System.out.print("Select Number : ");
}
public void User_List(HashMap<Integer, Client> clients){
for(int i=1; i<100; i++)
if(clients.get(i) != null)
System.out.println(i + ". " + clients.get(i).getName());
}
public void User_Select(HashMap<Integer, Client> clients){
System.out.println(" 사용자 리스트");
User_List(clients);
System.out.println("0. 뒤로가기");
System.out.print(" 사용자 선택 : ");
}
public void User_Create(){
System.out.println(" 사용자 이름을 입력해주세요.");
System.out.print(" 사용자 이름 : ");
}
//------------------------------------------------------------------------------ 사용자 선택 화면 이후
public void User_Delete(HashMap<Integer, Client> clients){
System.out.println(" 사용자 리스트");
User_List(clients);
System.out.print(" 삭제할 사용자 : ");
System.out.print("Select Number : ");
}
public void Bank_Select(){
System.out.println(" 은행을 선택해주세요");
System.out.println(" 1. KB");
System.out.println(" 2. NH");
System.out.println(" 3. SH");
System.out.print("Select Number : ");
}
public void Banking(){
System.out.println(" 1. 입금");
System.out.println(" 2. 출금");
System.out.println(" 3. 이체");
System.out.println(" 4. 조회");
System.out.println("------------------------");
System.out.println(" 5. 대출");
System.out.println(" 6. 대출 상환");
System.out.println("------------------------");
System.out.println(" 7. 계좌 삭제");
System.out.println("------------------------");
System.out.println(" 8. 1년 후...");
System.out.print("Select Number : ");
}
//------------------------------------------------------------------------------ 메뉴 함수
public void currentBalance(int balance){
System.out.println(" 현재 잔고 : " + balance);
}
public void currentDebt(int debt){
System.out.println("------------------------------");
System.out.println(" 현재 대출 금액 : " + (debt));
}
public void Deposit(){
System.out.print(" 입금할 금액 : ");
}
public void Withdraw(){
System.out.print(" 출금할 금액 : ");
}
public Object[] Transfer(){
Object[] AccountNumberNMoney = new Object[2];
System.out.print(" 계좌번호를 입력해 주세요.\n 계좌번호 : ");
AccountNumberNMoney[0] = sc.next();;
System.out.println(" 보내실 금액을 입력해주세요.");
AccountNumberNMoney[1] = sc.nextInt();
return AccountNumberNMoney;
}
public void State_List(LinkedList<String> stateList){
System.out.println(" 거래 내역을 출력합니다.");
for(int i =0; i<stateList.size(); i++){
System.out.println(stateList.get(i));
}
}
public void Loan(){
System.out.println(" 대출하실 금액을 입력해 주시기 바랍니다.");
System.out.print(" 대출 금액 : ");
}
public void Repay(){
System.out.println(" 상환하실 금액을 입력해 주시기 바랍니다.");
System.out.print(" 상환 금액 : ");
}
public void Time_Leap(){
System.out.println(" 1년 후로 이동합니다. (푸슝---~=★!)");
}
//------------------------------------------------------------------------------ 계정 관련 함수
public void no_Account(){
System.out.print(" 계좌가 존재하지 않습니다. 계좌를 생성하시겠습니까? ( y / n ) : ");
}
public void Acount_Create(){
System.out.println(" 계좌가 생성되었습니다.");
}
public void Acount_Delete(int bal){
System.out.println(" 계좌를 삭제합니다.");
System.out.println(" 계좌 내의 잔금 " + bal + "원이 자동으로 출금되었습니다.");
}
}
| revise view
| src/view/View.java | revise view |
|
Java | lgpl-2.1 | f6e63ff13f1f042a4be9dbe3a900b737ec92ab61 | 0 | dpoldrugo/proxyma | package m.c.m.proxyma.plugins.transformers;
import java.net.URL;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import m.c.m.proxyma.buffers.ByteBuffer;
import m.c.m.proxyma.buffers.ByteBufferFactory;
import m.c.m.proxyma.buffers.ByteBufferReader;
import m.c.m.proxyma.context.ProxymaContext;
import m.c.m.proxyma.resource.ProxymaHttpHeader;
import m.c.m.proxyma.resource.ProxymaResource;
import m.c.m.proxyma.resource.ProxymaResponseDataBean;
import m.c.m.proxyma.rewrite.URLRewriteEngine;
import org.htmlparser.Attribute;
import org.htmlparser.Parser;
import org.htmlparser.Tag;
import org.htmlparser.lexer.Lexer;
import org.htmlparser.lexer.Page;
import org.htmlparser.tags.ScriptTag;
import org.htmlparser.util.NodeList;
import org.htmlparser.visitors.NodeVisitor;
/**
* <p>
* This plugin implements a basic and not complete Javascript Rewriter.<br/>
* Its scans the html pages and the JS files seraching for javascript links.<br/>
* If any link is found it will be rewritten it in order to masquerde its real source.
* </p><p>
* NOTE: this software is released under GPL License.
* See the LICENSE of this distribution for more informations.
* </p>
*
* @author Marco Casavecchia Morganti (marcolinuz) [marcolinuz-at-gmail.com]
* @version $Id: JSRewriteTransformer.java 184 2010-07-20 22:26:21Z marcolinuz $
*/
public class JSRewriteTransformer extends m.c.m.proxyma.plugins.transformers.AbstractTransformer {
/**
* The default constructor for this class<br/>
* It prepares the context logger and the logger for the access-log.
*
* NOTE: Every plugin must have a constructor that takes a ProxymaContext as parameter.
*/
public JSRewriteTransformer(ProxymaContext context) {
//initialize the logger
this.log = context.getLogger();
this.rewriter = new URLRewriteEngine(context);
}
/**
* It scans the HTML page or the JS file contained into the response searching
* for any URL.<br/>
* When it finds an URL relative to a configured proxy folders,
* it uses the UrlRewriterEngine to modify the URL.<br/>
* @param aResource any ProxymaResource
*/
@Override
public void process(ProxymaResource aResource) throws Exception {
ProxymaResponseDataBean originalResponse = aResource.getResponse().getResponseData();
ProxymaHttpHeader contentType = originalResponse.getHeader(CONTENT_TYPE_HEADER);
if (contentType == null) {
log.fine(String.format("No %s header", CONTENT_TYPE_HEADER));
return;
}
Matcher htmlTypeMatcher = htmlContentTypePattern.matcher(contentType.getValue());
// The plugin works only on Javascript documents or fragments
if (contentType != null && (originalResponse.getData() != null)) {
if (htmlTypeMatcher.matches()) {
log.fine("This is an Html Page, searching for JS URLs.");
/**
* Inner Class for the html analisys.
*/
final NodeVisitor linkVisitor = new NodeVisitor() {
@Override
public void visitTag(Tag tag) {
String name = tag.getTagName();
String tagValue = null;
//selects the appropriate action based upon the tag and the attribute types
//NOTE: probably this method will be improoved in the future because it doesn't handles
// all the Javascript events. I have also found some problem in the htmlparser
// library with pages that uses lot of javascript.
if (SCRIPT.equalsIgnoreCase(name)) {
tagValue = tag.getAttribute(SRC);
if (tagValue == null) {
String Language = ((ScriptTag) tag).getLanguage();
if ((Language != null) && (Language.toLowerCase().indexOf(JAVASCRIPT_SEGMENT) >= 0))
((ScriptTag) tag).setScriptCode(findAndRewriteJSLinks(((ScriptTag) tag).getScriptCode(), currentResource));
}
} else if (BODY.equalsIgnoreCase(name)) {
tagValue = tag.getAttribute(ONLOAD);
if (tagValue != null) {
tag.removeAttribute(ONLOAD);
Attribute attribute = new Attribute();
attribute.setName(ONLOAD);
attribute.setAssignment("=");
attribute.setRawValue("'" + findAndRewriteJSLinks(tagValue, currentResource) + "'");
tag.setAttributeEx(attribute);
}
findAndRewriteJsEvents(tag, currentResource);
} else if (A.equalsIgnoreCase(name) ||
IMG.equalsIgnoreCase(name) ||
LINK.equalsIgnoreCase(name) ||
FORM.equalsIgnoreCase(name) ||
INPUT.equalsIgnoreCase(name) ||
INPUT.equalsIgnoreCase(name) ||
OBJECT.equalsIgnoreCase(name)||
AREA.equalsIgnoreCase(name) ||
DEL.equalsIgnoreCase(name) ||
INS.equalsIgnoreCase(name)) {
findAndRewriteJsEvents(tag, currentResource);
}
}
};
//Generates a parser for the given page
String encoding = aResource.getContext().getDefaultEncoding();
Matcher charsetMatcher = charsetPattern.matcher(contentType.getValue());
if (charsetMatcher.find())
encoding = charsetMatcher.replaceFirst(EMPTY_STRING);
ByteBufferReader reader = ByteBufferFactory.createNewByteBufferReader(originalResponse.getData());
String content = new String(reader.getWholeBufferAsByteArray(), encoding);
Parser parser = new Parser(new Lexer(new Page(content, encoding)));
//Generate a linkvisitor for the url rewriting
NodeList myPage = parser.parse(null);
currentResource = aResource;
myPage.visitAllNodesWith(linkVisitor);
//Add to the response the rewritten data
byte[] rewrittenContent = myPage.toHtml(true).getBytes(encoding);
ByteBuffer rewrittenData = ByteBufferFactory.createNewByteBuffer(aResource.getContext());
rewrittenData.appendBytes(rewrittenContent, rewrittenContent.length);
//Substitute the page data with the rewritten data
originalResponse.setData(rewrittenData);
} else if (isProcessableTextResource(aResource)) {
//Get CSS file content
log.fine("This is a JS file, searching for JS URLs.");
String encoding = aResource.getContext().getDefaultEncoding();
Matcher charsetMatcher = charsetPattern.matcher(contentType.getValue());
if (charsetMatcher.find())
encoding = charsetMatcher.replaceFirst(EMPTY_STRING);
//Get the original CSS Data
ByteBufferReader reader = ByteBufferFactory.createNewByteBufferReader(originalResponse.getData());
String content = new String(reader.getWholeBufferAsByteArray(), encoding);
//Parse and Rewrite CSS Data
String newContent = findAndRewriteJSLinks(content, aResource);
//Add to the response the rewritten data
byte[] rewrittenContent = newContent.getBytes(encoding);
ByteBuffer rewrittenData = ByteBufferFactory.createNewByteBuffer(aResource.getContext());
rewrittenData.appendBytes(rewrittenContent, rewrittenContent.length);
//Substitute the page data with the rewritten data
originalResponse.setData(rewrittenData);
}
}
}
/**
* Returns the name of the plugin.
* @return the name of the plugin.
*/
@Override
public String getName() {
return name;
}
/**
* Returns a short description of what the plugin does..<br/>
* You can use html tags into it.<br/>
* The result of this method call can be used by any interface
* to explain for what is the puropse of the plugin.
*
* @return a short description of the plugin
*/
@Override
public String getHtmlDescription() {
return description;
}
/**
* Guess it the current resource is processable by this plugin
*
* @param aResource the current resource
* @return true if it's a CSS file.
*/
private boolean isProcessableTextResource (ProxymaResource aResource) {
boolean retValue = false;
ProxymaHttpHeader contentType = aResource.getResponse().getResponseData().getHeader(CONTENT_TYPE_HEADER);
Matcher textTypeMatcher = textContentTypePattern.matcher(contentType.getValue());
Matcher jsTypeMatcher = jsContentTypePattern.matcher(contentType.getValue());
if (jsTypeMatcher.matches())
retValue = true;
else if (textTypeMatcher.matches() && aResource.getRequest().getRequestURI().toLowerCase().endsWith(".js"))
retValue = true;
return retValue;
}
/**
* Inspect the passed text searching for cssLinks to rewrite and rewrites
* them using the Url Rewrite Engine.
*
* @param content the content of the CSS (or a css fragment)
* @return the new content with substituted urls.
*/
private String findAndRewriteJSLinks(String content, ProxymaResource aResource) {
Matcher linksMatcher = jsLinksPattern.matcher(content);
StringBuffer retVal = new StringBuffer(content.length());
//Perform the urls substitution..
while (linksMatcher.find()) {
log.finer("Found URL: " + linksMatcher.group(1));
linksMatcher.appendReplacement(retVal, replaceJSURL(linksMatcher.group(1), aResource));
}
linksMatcher.appendTail(retVal);
return retVal.toString();
}
/**
* Replaces a cssurl with the complete url directive
* (es: www.a/b/c -> url(/d/b/c) )
*
* @param theUrl the url to rewrite.
* @param aResouce the current resource
* @return the new url() css directive.
*/
private String replaceJSURL (String theUrl, ProxymaResource aResouce) {
URL proxymaRootURL = aResouce.getProxymaRootURL();
StringBuffer retValue = new StringBuffer(theUrl.length());
retValue.append("'");
retValue.append(proxymaRootURL.getProtocol());
retValue.append("://");
retValue.append(proxymaRootURL.getHost());
if (proxymaRootURL.getPort() > 0)
retValue.append(":").append(proxymaRootURL.getPort());
String masqueradeURL = rewriter.masqueradeURL(theUrl, aResouce);
if (masqueradeURL.toLowerCase().startsWith("http://"))
retValue = new StringBuffer("'"+masqueradeURL);
else {
retValue.append(masqueradeURL);
}
retValue.append("'");
log.finer("Rewritten URL to: " + retValue.toString());
return retValue.toString();
}
/**
* Search and rewrites urls into common javascript events
*
* @param tag the tag to serach for events..
* @param aResource the current Resource
*/
private void findAndRewriteJsEvents(Tag tag, ProxymaResource aResource) {
for (int i = 0; i < EVENTS.length; i++) {
String tagValue = tag.getAttribute(EVENTS[i]);
if (tagValue != null) {
tag.removeAttribute(EVENTS[i]);
Attribute attribute = new Attribute();
attribute.setName(EVENTS[i]);
attribute.setAssignment("=");
attribute.setRawValue("'" + findAndRewriteJSLinks(tagValue, aResource) + "'");
tag.setAttributeEx(attribute);
}
}
}
/**
* The logger of the context..
*/
private Logger log = null;
/**
* The rewriter engine capable to rewrite URLs and Cookies.
*/
private URLRewriteEngine rewriter = null;
/**
* The only way to share variable between this class and its nested companion
* is to have private attributes with the wanted values.
*/
private ProxymaResource currentResource = null;
/**
* This is the Regular Expressione that does most of the work.
* It's used to recognize links on CSS and rewrite them with the rewrite engine.
*/
private static final Pattern jsLinksPattern = Pattern.compile("(?:\"|\')(http(?:s)?://.*[^\"'])(?:\"|\')", Pattern.CASE_INSENSITIVE);
/**
* One of the values for the content type header that activates this plugin.
*/
private static final Pattern htmlContentTypePattern = Pattern.compile("^text/html.*$", Pattern.CASE_INSENSITIVE);
/**
* One of the values for the content type header that activates this plugin.
*/
private static final Pattern textContentTypePattern = Pattern.compile("^text/plain.*$", Pattern.CASE_INSENSITIVE);
/**
* One of the values for the content type header that activates this plugin.
*/
private static final Pattern jsContentTypePattern = Pattern.compile("^application/javascript.*$", Pattern.CASE_INSENSITIVE);
/**
* Charset match Pattern
*/
private static final Pattern charsetPattern = Pattern.compile("^.*; *charset *= *", Pattern.CASE_INSENSITIVE);
/**
* The content type header
*/
private static final String CONTENT_TYPE_HEADER = "Content-Type";
/**
* The name of this plugin.
*/
private static final String name = "Basic Javascript URL Rewriter";
/**
* A short html description of what it does.
*/
private static final String description = ""
+ "This plugin is a Basic Javescript Transformer.<br/>"
+ "Its purpose is to scan the html pages and the JavaScript libraries seraching for URLs.<br/>"
+ "If any URL (http...) is found, it will be rewritten it in order to force the client browser "
+ "to use proxyma to retrive it.";
//INSPECTED TAG NAMES
private final static String SCRIPT = "script";
private final static String BODY = "body";
private final static String A = "a";
private final static String IMG = "img";
private final static String LINK = "link";
private final static String FORM = "form";
private final static String INPUT = "input";
private final static String AREA = "area";
private final static String INS = "ins";
private final static String DEL = "del";
private final static String OBJECT = "del";
//INSPECTED ATTRIBUTES
public final static String SRC = "src";
private final static String ONLOAD = "onLoad";
private final static String JAVASCRIPT_SEGMENT = "javascript";
private final static String EVENTS[] = {"onClick", "onRollOver", "onRollOut", "onChange"};
//Only an empty string.
private final static String EMPTY_STRING = "";
}
| proxyma-core/src/main/java/m/c/m/proxyma/plugins/transformers/JSRewriteTransformer.java | package m.c.m.proxyma.plugins.transformers;
import java.net.URL;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import m.c.m.proxyma.buffers.ByteBuffer;
import m.c.m.proxyma.buffers.ByteBufferFactory;
import m.c.m.proxyma.buffers.ByteBufferReader;
import m.c.m.proxyma.context.ProxymaContext;
import m.c.m.proxyma.resource.ProxymaHttpHeader;
import m.c.m.proxyma.resource.ProxymaResource;
import m.c.m.proxyma.resource.ProxymaResponseDataBean;
import m.c.m.proxyma.rewrite.URLRewriteEngine;
import org.htmlparser.Attribute;
import org.htmlparser.Parser;
import org.htmlparser.Tag;
import org.htmlparser.lexer.Lexer;
import org.htmlparser.lexer.Page;
import org.htmlparser.tags.ScriptTag;
import org.htmlparser.util.NodeList;
import org.htmlparser.visitors.NodeVisitor;
/**
* <p>
* This plugin implements a basic and not complete Javascript Rewriter.<br/>
* Its scans the html pages and the JS files seraching for javascript links.<br/>
* If any link is found it will be rewritten it in order to masquerde its real source.
* </p><p>
* NOTE: this software is released under GPL License.
* See the LICENSE of this distribution for more informations.
* </p>
*
* @author Marco Casavecchia Morganti (marcolinuz) [marcolinuz-at-gmail.com]
* @version $Id: JSRewriteTransformer.java 184 2010-07-20 22:26:21Z marcolinuz $
*/
public class JSRewriteTransformer extends m.c.m.proxyma.plugins.transformers.AbstractTransformer {
/**
* The default constructor for this class<br/>
* It prepares the context logger and the logger for the access-log.
*
* NOTE: Every plugin must have a constructor that takes a ProxymaContext as parameter.
*/
public JSRewriteTransformer(ProxymaContext context) {
//initialize the logger
this.log = context.getLogger();
this.rewriter = new URLRewriteEngine(context);
}
/**
* It scans the HTML page or the JS file contained into the response searching
* for any URL.<br/>
* When it finds an URL relative to a configured proxy folders,
* it uses the UrlRewriterEngine to modify the URL.<br/>
* @param aResource any ProxymaResource
*/
@Override
public void process(ProxymaResource aResource) throws Exception {
ProxymaResponseDataBean originalResponse = aResource.getResponse().getResponseData();
ProxymaHttpHeader contentType = originalResponse.getHeader(CONTENT_TYPE_HEADER);
if (contentType == null) {
log.fine(String.format("No %s header", CONTENT_TYPE_HEADER));
return;
}
Matcher htmlTypeMatcher = htmlContentTypePattern.matcher(contentType.getValue());
// The plugin works only on Javascript documents or fragments
if (contentType != null && (originalResponse.getData() != null)) {
if (htmlTypeMatcher.matches()) {
log.fine("This is an Html Page, searching for JS URLs.");
/**
* Inner Class for the html analisys.
*/
final NodeVisitor linkVisitor = new NodeVisitor() {
@Override
public void visitTag(Tag tag) {
String name = tag.getTagName();
String tagValue = null;
//selects the appropriate action based upon the tag and the attribute types
//NOTE: probably this method will be improoved in the future because it doesn't handles
// all the Javascript events. I have also found some problem in the htmlparser
// library with pages that uses lot of javascript.
if (SCRIPT.equalsIgnoreCase(name)) {
tagValue = tag.getAttribute(SRC);
if (tagValue == null) {
String Language = ((ScriptTag) tag).getLanguage();
if ((Language != null) && (Language.toLowerCase().indexOf(JAVASCRIPT_SEGMENT) >= 0))
((ScriptTag) tag).setScriptCode(findAndRewriteJSLinks(((ScriptTag) tag).getScriptCode(), currentResource));
}
} else if (BODY.equalsIgnoreCase(name)) {
tagValue = tag.getAttribute(ONLOAD);
if (tagValue != null) {
tag.removeAttribute(ONLOAD);
Attribute attribute = new Attribute();
attribute.setName(ONLOAD);
attribute.setAssignment("=");
attribute.setRawValue("'" + findAndRewriteJSLinks(tagValue, currentResource) + "'");
tag.setAttributeEx(attribute);
}
findAndRewriteJsEvents(tag, currentResource);
} else if (A.equalsIgnoreCase(name) ||
IMG.equalsIgnoreCase(name) ||
LINK.equalsIgnoreCase(name) ||
FORM.equalsIgnoreCase(name) ||
INPUT.equalsIgnoreCase(name) ||
INPUT.equalsIgnoreCase(name) ||
OBJECT.equalsIgnoreCase(name)||
AREA.equalsIgnoreCase(name) ||
DEL.equalsIgnoreCase(name) ||
INS.equalsIgnoreCase(name)) {
findAndRewriteJsEvents(tag, currentResource);
}
}
};
//Generates a parser for the given page
String encoding = aResource.getContext().getDefaultEncoding();
Matcher charsetMatcher = charsetPattern.matcher(contentType.getValue());
if (charsetMatcher.find())
encoding = charsetMatcher.replaceFirst(EMPTY_STRING);
ByteBufferReader reader = ByteBufferFactory.createNewByteBufferReader(originalResponse.getData());
String content = new String(reader.getWholeBufferAsByteArray(), encoding);
Parser parser = new Parser(new Lexer(new Page(content, encoding)));
//Generate a linkvisitor for the url rewriting
NodeList myPage = parser.parse(null);
currentResource = aResource;
myPage.visitAllNodesWith(linkVisitor);
//Add to the response the rewritten data
byte[] rewrittenContent = myPage.toHtml(true).getBytes(encoding);
ByteBuffer rewrittenData = ByteBufferFactory.createNewByteBuffer(aResource.getContext());
rewrittenData.appendBytes(rewrittenContent, rewrittenContent.length);
//Substitute the page data with the rewritten data
originalResponse.setData(rewrittenData);
} else if (isProcessableTextResource(aResource)) {
//Get CSS file content
log.fine("This is a JS file, searching for JS URLs.");
String encoding = aResource.getContext().getDefaultEncoding();
Matcher charsetMatcher = charsetPattern.matcher(contentType.getValue());
if (charsetMatcher.find())
encoding = charsetMatcher.replaceFirst(EMPTY_STRING);
//Get the original CSS Data
ByteBufferReader reader = ByteBufferFactory.createNewByteBufferReader(originalResponse.getData());
String content = new String(reader.getWholeBufferAsByteArray(), encoding);
//Parse and Rewrite CSS Data
String newContent = findAndRewriteJSLinks(content, aResource);
//Add to the response the rewritten data
byte[] rewrittenContent = newContent.getBytes(encoding);
ByteBuffer rewrittenData = ByteBufferFactory.createNewByteBuffer(aResource.getContext());
rewrittenData.appendBytes(rewrittenContent, rewrittenContent.length);
//Substitute the page data with the rewritten data
originalResponse.setData(rewrittenData);
}
}
}
/**
* Returns the name of the plugin.
* @return the name of the plugin.
*/
@Override
public String getName() {
return name;
}
/**
* Returns a short description of what the plugin does..<br/>
* You can use html tags into it.<br/>
* The result of this method call can be used by any interface
* to explain for what is the puropse of the plugin.
*
* @return a short description of the plugin
*/
@Override
public String getHtmlDescription() {
return description;
}
/**
* Guess it the current resource is processable by this plugin
*
* @param aResource the current resource
* @return true if it's a CSS file.
*/
private boolean isProcessableTextResource (ProxymaResource aResource) {
boolean retValue = false;
ProxymaHttpHeader contentType = aResource.getResponse().getResponseData().getHeader(CONTENT_TYPE_HEADER);
Matcher textTypeMatcher = textContentTypePattern.matcher(contentType.getValue());
Matcher jsTypeMatcher = jsContentTypePattern.matcher(contentType.getValue());
if (jsTypeMatcher.matches())
retValue = true;
else if (textTypeMatcher.matches() && aResource.getRequest().getRequestURI().toLowerCase().endsWith(".js"))
retValue = true;
return retValue;
}
/**
* Inspect the passed text searching for cssLinks to rewrite and rewrites
* them using the Url Rewrite Engine.
*
* @param content the content of the CSS (or a css fragment)
* @return the new content with substituted urls.
*/
private String findAndRewriteJSLinks(String content, ProxymaResource aResource) {
Matcher linksMatcher = jsLinksPattern.matcher(content);
StringBuffer retVal = new StringBuffer(content.length());
//Perform the urls substitution..
while (linksMatcher.find()) {
log.finer("Found URL: " + linksMatcher.group(1));
linksMatcher.appendReplacement(retVal, replaceJSURL(linksMatcher.group(1), aResource));
}
linksMatcher.appendTail(retVal);
return retVal.toString();
}
/**
* Replaces a cssurl with the complete url directive
* (es: www.a/b/c -> url(/d/b/c) )
*
* @param theUrl the url to rewrite.
* @param aResouce the current resource
* @return the new url() css directive.
*/
private String replaceJSURL (String theUrl, ProxymaResource aResouce) {
URL proxymaRootURL = aResouce.getProxymaRootURL();
StringBuffer retValue = new StringBuffer(theUrl.length());
retValue.append("'");
retValue.append(proxymaRootURL.getProtocol());
retValue.append("://");
retValue.append(proxymaRootURL.getHost());
if (proxymaRootURL.getPort() > 0)
retValue.append(":").append(proxymaRootURL.getPort());
retValue.append(rewriter.masqueradeURL(theUrl, aResouce));
retValue.append("'");
log.finer("Rewritten URL to: " + retValue.toString());
return retValue.toString();
}
/**
* Search and rewrites urls into common javascript events
*
* @param tag the tag to serach for events..
* @param aResource the current Resource
*/
private void findAndRewriteJsEvents(Tag tag, ProxymaResource aResource) {
for (int i = 0; i < EVENTS.length; i++) {
String tagValue = tag.getAttribute(EVENTS[i]);
if (tagValue != null) {
tag.removeAttribute(EVENTS[i]);
Attribute attribute = new Attribute();
attribute.setName(EVENTS[i]);
attribute.setAssignment("=");
attribute.setRawValue("'" + findAndRewriteJSLinks(tagValue, aResource) + "'");
tag.setAttributeEx(attribute);
}
}
}
/**
* The logger of the context..
*/
private Logger log = null;
/**
* The rewriter engine capable to rewrite URLs and Cookies.
*/
private URLRewriteEngine rewriter = null;
/**
* The only way to share variable between this class and its nested companion
* is to have private attributes with the wanted values.
*/
private ProxymaResource currentResource = null;
/**
* This is the Regular Expressione that does most of the work.
* It's used to recognize links on CSS and rewrite them with the rewrite engine.
*/
private static final Pattern jsLinksPattern = Pattern.compile("(?:\"|\')(http(?:s)?://.*[^\"'])(?:\"|\')", Pattern.CASE_INSENSITIVE);
/**
* One of the values for the content type header that activates this plugin.
*/
private static final Pattern htmlContentTypePattern = Pattern.compile("^text/html.*$", Pattern.CASE_INSENSITIVE);
/**
* One of the values for the content type header that activates this plugin.
*/
private static final Pattern textContentTypePattern = Pattern.compile("^text/plain.*$", Pattern.CASE_INSENSITIVE);
/**
* One of the values for the content type header that activates this plugin.
*/
private static final Pattern jsContentTypePattern = Pattern.compile("^application/javascript.*$", Pattern.CASE_INSENSITIVE);
/**
* Charset match Pattern
*/
private static final Pattern charsetPattern = Pattern.compile("^.*; *charset *= *", Pattern.CASE_INSENSITIVE);
/**
* The content type header
*/
private static final String CONTENT_TYPE_HEADER = "Content-Type";
/**
* The name of this plugin.
*/
private static final String name = "Basic Javascript URL Rewriter";
/**
* A short html description of what it does.
*/
private static final String description = ""
+ "This plugin is a Basic Javescript Transformer.<br/>"
+ "Its purpose is to scan the html pages and the JavaScript libraries seraching for URLs.<br/>"
+ "If any URL (http...) is found, it will be rewritten it in order to force the client browser "
+ "to use proxyma to retrive it.";
//INSPECTED TAG NAMES
private final static String SCRIPT = "script";
private final static String BODY = "body";
private final static String A = "a";
private final static String IMG = "img";
private final static String LINK = "link";
private final static String FORM = "form";
private final static String INPUT = "input";
private final static String AREA = "area";
private final static String INS = "ins";
private final static String DEL = "del";
private final static String OBJECT = "del";
//INSPECTED ATTRIBUTES
public final static String SRC = "src";
private final static String ONLOAD = "onLoad";
private final static String JAVASCRIPT_SEGMENT = "javascript";
private final static String EVENTS[] = {"onClick", "onRollOver", "onRollOut", "onChange"};
//Only an empty string.
private final static String EMPTY_STRING = "";
}
| JSRewriteTransformer - masqueradeURL fix doubling of urls | proxyma-core/src/main/java/m/c/m/proxyma/plugins/transformers/JSRewriteTransformer.java | JSRewriteTransformer - masqueradeURL fix doubling of urls |
|
Java | lgpl-2.1 | f718b6b67ac9c3361c7e836d42529a8f8f87f920 | 0 | fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui | package to.etc.domui.component.dynaima;
import org.jCharts.axisChart.*;
import org.jCharts.chartData.*;
import org.jCharts.properties.*;
import org.jCharts.types.*;
/**
* Helper class to initialize a Bar chart.
*
* @author <a href="mailto:[email protected]">Nemanja Maksimovic</a>
* Created on 11 May 2011
*/
public class BarCharter extends AbstractCharter {
private ClusteredBarChartProperties barChartProperties = new ClusteredBarChartProperties();
private String m_bucketTitle;
private String m_valueTitle;
public BarCharter(JGraphChartSource source, String title, int width, int height, String bucketTitle, String valueTitle) {
super(source, title, width, height);
m_bucketTitle = bucketTitle;
m_valueTitle = valueTitle;
}
@Override
public void finish() throws Exception {
double[][] data = getChartDataAsMatrix();
String[] axisLabels = {" "};
DataSeries ds = new DataSeries(axisLabels, m_bucketTitle, m_valueTitle, m_title);
AxisChartDataSet ads = new AxisChartDataSet(data, getChartDataLabels(), selectPaints(), ChartType.BAR_CLUSTERED, barChartProperties);
ds.addIAxisPlotDataSet(ads);
AxisChart c = new AxisChart(ds, m_properties, m_axisProperties, m_legendProperties, m_width, m_height);
m_source.setChart(c);
}
private double[][] getChartDataAsMatrix() {
double[] original = getChartDataValues();
double[][] result = new double[original.length][1];
for(int i = 0; i < original.length; i++) {
result[i][0] = original[i];
}
return result;
}
}
| to.etc.domui/src/to/etc/domui/component/dynaima/BarCharter.java | package to.etc.domui.component.dynaima;
import org.jCharts.axisChart.*;
import org.jCharts.chartData.*;
import org.jCharts.properties.*;
import org.jCharts.types.*;
/**
* Helper class to initialize a Bar chart.
*
* @author <a href="mailto:[email protected]">Nemanja Maksimovic</a>
* Created on 11 May 2011
*/
public class BarCharter extends AbstractCharter {
private ClusteredBarChartProperties barChartProperties = new ClusteredBarChartProperties();
private String m_bucketTitle;
private String m_valueTitle;
public BarCharter(JGraphChartSource source, String title, int width, int height, String bucketTitle, String valueTitle) {
super(source, title, width, height);
m_bucketTitle = bucketTitle;
m_valueTitle = valueTitle;
}
@Override
public void finish() throws Exception {
double[][] data = getDataAsMatrix(getChartDataValues());
String[] axisLabels = {" "};
DataSeries ds = new DataSeries(axisLabels, m_bucketTitle, m_valueTitle, m_title);
AxisChartDataSet ads = new AxisChartDataSet(data, getChartDataLabels(), selectPaints(), ChartType.BAR_CLUSTERED, barChartProperties);
ds.addIAxisPlotDataSet(ads);
AxisChart c = new AxisChart(ds, m_properties, m_axisProperties, m_legendProperties, m_width, m_height);
m_source.setChart(c);
}
private double[][] getDataAsMatrix(double[] original){
double[][] result = new double[original.length][1];
for(int i = 0; i < getChartDataValues().length; i++) {
result[i][0] = getChartDataValues()[i];
}
return result;
}
}
| Refactored out redundant parameter. | to.etc.domui/src/to/etc/domui/component/dynaima/BarCharter.java | Refactored out redundant parameter. |
|
Java | lgpl-2.1 | 247b9bba473877387eb16503793c8808fb83afa7 | 0 | julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine | package org.intermine.bio.dataconversion;
/*
* Copyright (C) 2002-2010 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.intermine.dataconversion.DataConverter;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.xml.full.Item;
/**
* Processor of file with information about genes. Format of file:
* <tt>
* tax_id GeneID Symbol LocusTag Synonyms dbXrefs chromosome map_location description
* type_of_gene Symbol_from_nomenclature_authority Full_name_from_nomenclature_authority
* Nomenclature_status Other_designations Modification_date (tab is used as a separator,
* pound sign - start of a comment)
* </tt>
* @author Jakub Kulaviak
**/
public class GenesFileProcessor
{
private BufferedReader infoReader;
private int lineCounter = 0;
private Map<String, Item> genes = new HashMap<String, Item>();
private String lastLine = null;
private DataConverter converter;
private Integer checkOrganismId = null;
private Set<Integer> alreadyProcessedGenes = new HashSet<Integer>();
private Set<String> genesToRemove = new TreeSet<String>();
private IdResolver resolver;
private static final Logger LOG = Logger.getLogger(GenesFileProcessor.class);
private String datasetRefId;
/**
* Constructor.
* @param fileReader file reader, this class is not responsible for closing fileReader
* @param converter associated converter that is used for creating and saving items
* @param datasetRefId reference to dataset object for the gene
* @param resolverFactory the FlyBase id resolver factory
*/
public GenesFileProcessor(Reader fileReader, DataConverter converter, String datasetRefId,
IdResolverFactory resolverFactory) {
// converter is needed for creating items method
// all converters must used one central converter for creating items because
// to be sure, that created items will have unique id
this.converter = converter;
this.datasetRefId = datasetRefId;
initReader(fileReader);
resolver = resolverFactory.getIdResolver(false);
}
private void initReader(Reader fileReader) {
infoReader = new BufferedReader(fileReader);
}
/**
*
* @param geneToPub map between gene and list of publication that mentions this gene
* @param orgToProcessId organism to be processed id
* @param orgToProcess organism to be processed
* @throws IOException when error happens during reading from file
*/
public void processGenes(Map<Integer, List<String>> geneToPub, Integer orgToProcessId,
Item orgToProcess) throws IOException {
String line;
// use taxonID to get correct type of data where available
while ((line = getLine()) != null) {
lineCounter++;
line = line.trim();
if (line.startsWith("#") || line.length() == 0) {
continue;
}
String[] parts = line.split("\\t");
if (parts.length < 6) {
throw new GenesProcessorException("Invalid line - line " + lineCounter
+ ". There are " + parts.length + " bits but there should be more than 6");
}
Integer organismId, ncbiGeneId;
try {
organismId = new Integer(parts[0].trim());
ncbiGeneId = new Integer(parts[1].trim());
} catch (NumberFormatException ex) {
throw new GenesProcessorException("Invalid identifiers at line " + line);
}
checkFileIsSorted(organismId);
//String identifier = parts[3].trim();
String pubMedId = parts[5].trim();
if (orgToProcessId.intValue() == organismId.intValue()) {
processGeneInfo(ncbiGeneId, organismId, pubMedId, geneToPub.get(ncbiGeneId),
orgToProcess);
geneToPub.remove(ncbiGeneId);
} else if (organismId.intValue() > orgToProcessId.intValue()) {
lastLine = line;
storeGenes();
checkGenesProcessed(geneToPub);
return;
} else {
continue;
}
}
storeGenes();
checkGenesProcessed(geneToPub);
}
private void checkGenesProcessed(Map<Integer, List<String>> geneToPub) {
if (geneToPub.size() != 0) {
throw new GenesProcessorException("These " + geneToPub.size() + " genes were in the "
+ "PubMed2Gene file but not in the gene info file: "
+ formatGeneNames(geneToPub.keySet()));
}
}
private void checkFileIsSorted(Integer organismId) {
if (checkOrganismId != null) {
if (organismId.intValue() < checkOrganismId.intValue()) {
throw new GenesProcessorException("This file processor expects that "
+ "file is sorted according to the organism id else the "
+ "behaviour is undefined.");
}
}
checkOrganismId = organismId;
}
private String formatGeneNames(Set<Integer> keySet) {
StringBuilder sb = new StringBuilder();
for (Integer id : keySet) {
sb.append(id + ", ");
}
return sb.toString();
}
private String getLine() throws IOException {
if (lastLine != null) {
String tmp = lastLine;
lastLine = null;
return tmp;
}
return infoReader.readLine();
}
private void storeGenes() {
for (String id : genesToRemove) {
genes.remove(id);
}
try {
List<Item> gs = new ArrayList<Item>();
for (String id : genes.keySet()) {
gs.add(genes.get(id));
}
store(gs);
} catch (ObjectStoreException e) {
throw new GenesProcessorException(e);
}
genes = new HashMap<String, Item>();
}
private void store(List<Item> genes2) throws ObjectStoreException {
converter.store(genes2);
}
private void processGeneInfo(Integer ncbiGeneId, Integer organismId, String primaryIdentifier,
List<String> publications, Item organism) {
String primIdentifier = primaryIdentifier;
// If gene was already mentioned in gene info file then is skipped
if (alreadyProcessedGenes.contains(ncbiGeneId)) {
return;
}
// If there is a gene in gene information file that doesn't have
// any publication then the gene is skipped
// if there isn't primary identifier gene is skipped
if (publications != null && !"-".equals(primIdentifier)) {
if (setPrimaryIdentifier(organismId.toString())) {
primIdentifier = removeDatabasePrefix(primIdentifier);
if (!isValidPrimIdentifier(primIdentifier)) {
return;
}
if (isDrosophilaMelanogaster(organismId.toString()) && resolver != null) {
primIdentifier = resolvePrimIdentifier(organismId.toString(), primIdentifier);
}
if (primIdentifier == null) {
LOG.warn("RESOLVER: failed to resolve gene to one identifier, ignoring gene: "
+ primaryIdentifier + ". Number of matched ids: "
+ resolver.countResolutions(organismId.toString(), primIdentifier));
return;
}
} else {
primIdentifier = null;
}
Item gene = createGene(ncbiGeneId, primIdentifier, organism);
for (String writerPubId : publications) {
gene.addToCollection("publications", writerPubId);
}
// checks gene duplicates - if there are two or more same genes with
// the same primIdentifier but different ncbi gene id then all these genes are removed
if (primIdentifier != null) {
if (genes.get(primIdentifier) == null) {
genes.put(primIdentifier, gene);
} else {
genesToRemove.add(primIdentifier);
}
}
alreadyProcessedGenes.add(ncbiGeneId);
}
}
private boolean setPrimaryIdentifier(String taxonId) {
return !isHomoSapiens(taxonId);
}
private boolean isValidPrimIdentifier(String primIdentifier) {
return !primIdentifier.contains("|");
}
private String resolvePrimIdentifier(String taxonId, String primIdentifier) {
int resCount = resolver.countResolutions(taxonId, primIdentifier);
if (resCount != 1) {
return null;
}
return resolver.resolveId(taxonId, primIdentifier).iterator().next();
}
private boolean isDrosophilaMelanogaster(String taxonId) {
return "7227".equals(taxonId);
}
private boolean isHomoSapiens(String taxonId) {
return "9606".equals(taxonId);
}
private Item createGene(Integer ncbiGeneId, String primaryIdentifier, Item organism) {
Item gene = createItem("Gene");
gene.setAttribute("ncbiGeneNumber", ncbiGeneId.toString());
if (primaryIdentifier != null) {
gene.setAttribute("primaryIdentifier", primaryIdentifier);
}
gene.setReference("organism", organism);
gene.setCollection("dataSets", new ArrayList<String>(Collections.singleton(datasetRefId)));
return gene;
}
private String removeDatabasePrefix(String id) {
String dbId = id;
if (dbId.toUpperCase().startsWith("SGD:")) {
dbId = dbId.substring(4);
} else if (dbId.toUpperCase().startsWith("WORMBASE:")) {
dbId = dbId.substring(9);
} else if (dbId.toUpperCase().startsWith("FLYBASE:")) {
dbId = dbId.substring(8);
} else if (dbId.toUpperCase().startsWith("VECTORBASE:")) {
dbId = dbId.substring(11);
}
return dbId;
}
private Item createItem(String className) {
return converter.createItem(className);
}
}
| bio/sources/pubmed-gene/main/src/org/intermine/bio/dataconversion/GenesFileProcessor.java | package org.intermine.bio.dataconversion;
/*
* Copyright (C) 2002-2010 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.intermine.dataconversion.DataConverter;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.xml.full.Item;
/**
* Processor of file with information about genes. Format of file:
* <tt>
* tax_id GeneID Symbol LocusTag Synonyms dbXrefs chromosome map_location description
* type_of_gene Symbol_from_nomenclature_authority Full_name_from_nomenclature_authority
* Nomenclature_status Other_designations Modification_date (tab is used as a separator,
* pound sign - start of a comment)
* </tt>
* @author Jakub Kulaviak
**/
public class GenesFileProcessor
{
private BufferedReader infoReader;
private int lineCounter = 0;
private Map<String, Item> genes = new HashMap<String, Item>();
private String lastLine = null;
private DataConverter converter;
private Integer checkOrganismId = null;
private Set<Integer> alreadyProcessedGenes = new HashSet<Integer>();
private Set<String> genesToRemove = new TreeSet<String>();
private IdResolver resolver;
private static final Logger LOG = Logger.getLogger(GenesFileProcessor.class);
private String datasetRefId;
/**
* Constructor.
* @param fileReader file reader, this class is not responsible for closing fileReader
* @param converter associated converter that is used for creating and saving items
* @param datasetRefId reference to dataset object for the gene
* @param resolverFactory the FlyBase id resolver factory
*/
public GenesFileProcessor(Reader fileReader, DataConverter converter, String datasetRefId,
IdResolverFactory resolverFactory) {
// converter is needed for creating items method
// all converters must used one central converter for creating items because
// to be sure, that created items will have unique id
this.converter = converter;
this.datasetRefId = datasetRefId;
initReader(fileReader);
resolver = resolverFactory.getIdResolver(false);
}
private void initReader(Reader fileReader) {
infoReader = new BufferedReader(fileReader);
}
/**
*
* @param geneToPub map between gene and list of publication that mentions this gene
* @param orgToProcessId organism to be processed id
* @param orgToProcess organism to be processed
* @throws IOException when error happens during reading from file
*/
public void processGenes(Map<Integer, List<String>> geneToPub, Integer orgToProcessId,
Item orgToProcess) throws IOException {
String line;
// use taxonID to get correct type of data where available
while ((line = getLine()) != null) {
lineCounter++;
line = line.trim();
if (line.startsWith("#") || line.length() == 0) {
continue;
}
String[] parts = line.split("\\t");
if (parts.length < 6) {
throw new GenesProcessorException("Invalid line - line " + lineCounter
+ ". There are " + parts.length + " bits but there should be more than 6");
}
Integer organismId, ncbiGeneId;
try {
organismId = new Integer(parts[0].trim());
ncbiGeneId = new Integer(parts[1].trim());
} catch (NumberFormatException ex) {
throw new GenesProcessorException("Invalid identifiers at line " + line);
}
checkFileIsSorted(organismId);
//String identifier = parts[3].trim();
String pubMedId = parts[5].trim();
if (orgToProcessId.intValue() == organismId.intValue()) {
processGeneInfo(ncbiGeneId, organismId, pubMedId, geneToPub.get(ncbiGeneId),
orgToProcess);
geneToPub.remove(ncbiGeneId);
} else if (organismId.intValue() > orgToProcessId.intValue()) {
lastLine = line;
storeGenes();
checkGenesProcessed(geneToPub);
return;
} else {
continue;
}
}
storeGenes();
checkGenesProcessed(geneToPub);
}
private void checkGenesProcessed(Map<Integer, List<String>> geneToPub) {
if (geneToPub.size() != 0) {
throw new GenesProcessorException("These " + geneToPub.size() + " genes were in the "
+ "PubMed2Gene file but not in the gene info file: "
+ formatGeneNames(geneToPub.keySet()));
}
}
private void checkFileIsSorted(Integer organismId) {
if (checkOrganismId != null) {
if (organismId.intValue() < checkOrganismId.intValue()) {
throw new GenesProcessorException("This file processor expects that "
+ "file is sorted according to the organism id else the "
+ "behaviour is undefined.");
}
}
checkOrganismId = organismId;
}
private String formatGeneNames(Set<Integer> keySet) {
StringBuilder sb = new StringBuilder();
for (Integer id : keySet) {
sb.append(id + ", ");
}
return sb.toString();
}
private String getLine() throws IOException {
if (lastLine != null) {
String tmp = lastLine;
lastLine = null;
return tmp;
}
return infoReader.readLine();
}
private void storeGenes() {
for (String id : genesToRemove) {
genes.remove(id);
}
try {
List<Item> gs = new ArrayList<Item>();
for (String id : genes.keySet()) {
gs.add(genes.get(id));
}
store(gs);
} catch (ObjectStoreException e) {
throw new GenesProcessorException(e);
}
genes = new HashMap<String, Item>();
}
private void store(List<Item> genes2) throws ObjectStoreException {
converter.store(genes2);
}
private void processGeneInfo(Integer ncbiGeneId, Integer organismId, String primaryIdentifier,
List<String> publications, Item organism) {
String primIdentifier = primaryIdentifier;
// If gene was already mentioned in gene info file then is skipped
if (alreadyProcessedGenes.contains(ncbiGeneId)) {
return;
}
// If there is a gene in gene information file that doesn't have
// any publication then the gene is skipped
// if there isn't primary identifier gene is skipped
if (publications != null && !"-".equals(primIdentifier)) {
if (setPrimaryIdentifier(organismId.toString())) {
primIdentifier = removeDatabasePrefix(primIdentifier);
if (!isValidPrimIdentifier(primIdentifier)) {
return;
}
if (isDrosophilaMelanogaster(organismId.toString()) && resolver != null) {
primIdentifier = resolvePrimIdentifier(organismId.toString(), primIdentifier);
}
if (primIdentifier == null) {
LOG.warn("RESOLVER: failed to resolve gene to one identifier, ignoring gene: "
+ primaryIdentifier + ". Number of matched ids: "
+ resolver.countResolutions(organismId.toString(), primIdentifier));
return;
}
} else {
primIdentifier = null;
}
Item gene = createGene(ncbiGeneId, primIdentifier, organism);
for (String writerPubId : publications) {
gene.addToCollection("publications", writerPubId);
}
// checks gene duplicates - if there are two or more same genes with
// the same primIdentifier but different ncbi gene id then all these genes are removed
if (genes.get(primIdentifier) == null) {
genes.put(primIdentifier, gene);
} else {
genesToRemove.add(primIdentifier);
}
alreadyProcessedGenes.add(ncbiGeneId);
}
}
private boolean setPrimaryIdentifier(String taxonId) {
return !isHomoSapiens(taxonId);
}
private boolean isValidPrimIdentifier(String primIdentifier) {
return !primIdentifier.contains("|");
}
private String resolvePrimIdentifier(String taxonId, String primIdentifier) {
int resCount = resolver.countResolutions(taxonId, primIdentifier);
if (resCount != 1) {
return null;
}
return resolver.resolveId(taxonId, primIdentifier).iterator().next();
}
private boolean isDrosophilaMelanogaster(String taxonId) {
return "7227".equals(taxonId);
}
private boolean isHomoSapiens(String taxonId) {
return "9606".equals(taxonId);
}
private Item createGene(Integer ncbiGeneId, String primaryIdentifier, Item organism) {
Item gene = createItem("Gene");
gene.setAttribute("ncbiGeneNumber", ncbiGeneId.toString());
if (primaryIdentifier != null) {
gene.setAttribute("primaryIdentifier", primaryIdentifier);
}
gene.setReference("organism", organism);
gene.setCollection("dataSets", new ArrayList<String>(Collections.singleton(datasetRefId)));
return gene;
}
private String removeDatabasePrefix(String id) {
String dbId = id;
if (dbId.toUpperCase().startsWith("SGD:")) {
dbId = dbId.substring(4);
} else if (dbId.toUpperCase().startsWith("WORMBASE:")) {
dbId = dbId.substring(9);
} else if (dbId.toUpperCase().startsWith("FLYBASE:")) {
dbId = dbId.substring(8);
} else if (dbId.toUpperCase().startsWith("VECTORBASE:")) {
dbId = dbId.substring(11);
}
return dbId;
}
private Item createItem(String className) {
return converter.createItem(className);
}
}
| Fix NullPointerException reading human genes.
Former-commit-id: dcc36c751297f384553f3074de21fab58e6551c5 | bio/sources/pubmed-gene/main/src/org/intermine/bio/dataconversion/GenesFileProcessor.java | Fix NullPointerException reading human genes. |
|
Java | lgpl-2.1 | 405855a0cecc6634f224cc912045a9f10635fe5d | 0 | ivassile/wildfly-core,aloubyansky/wildfly-core,luck3y/wildfly-core,JiriOndrusek/wildfly-core,yersan/wildfly-core,aloubyansky/wildfly-core,jfdenise/wildfly-core,ivassile/wildfly-core,bstansberry/wildfly-core,darranl/wildfly-core,ivassile/wildfly-core,JiriOndrusek/wildfly-core,soul2zimate/wildfly-core,aloubyansky/wildfly-core,darranl/wildfly-core,luck3y/wildfly-core,jamezp/wildfly-core,jamezp/wildfly-core,yersan/wildfly-core,bstansberry/wildfly-core,bstansberry/wildfly-core,yersan/wildfly-core,soul2zimate/wildfly-core,darranl/wildfly-core,jamezp/wildfly-core,JiriOndrusek/wildfly-core,jfdenise/wildfly-core,jfdenise/wildfly-core,luck3y/wildfly-core,soul2zimate/wildfly-core | /*
* JBoss, Home of Professional Open Source
* Copyright 2014, JBoss Inc., and individual contributors as indicated
* by the @authors tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.as.controller;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.RESULT;
import java.util.Set;
import org.jboss.as.controller.OperationContext.Stage;
import org.jboss.as.controller.logging.ControllerLogger;
import org.jboss.as.controller.operations.common.Util;
import org.jboss.as.controller.registry.AttributeAccess;
import org.jboss.as.controller.registry.ImmutableManagementResourceRegistration;
import org.jboss.as.controller.registry.Resource;
import org.jboss.dmr.ModelNode;
/**
* @author Tomaz Cerar (c) 2014 Red Hat Inc.
*/
class ValidateModelStepHandler implements OperationStepHandler {
private static volatile ValidateModelStepHandler INSTANCE;
private final OperationStepHandler extraValidationStepHandler;
private ValidateModelStepHandler(OperationStepHandler extraValidationStepHandler) {
this.extraValidationStepHandler = extraValidationStepHandler;
}
static ValidateModelStepHandler getInstance(OperationStepHandler extraValidationStepHandler) {
if (INSTANCE == null) {
synchronized (ValidateModelStepHandler.class) {
if (INSTANCE == null) {
INSTANCE = new ValidateModelStepHandler(extraValidationStepHandler);
}
}
}
return INSTANCE;
}
@Override
public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
final Resource resource = loadResource(context);
if (resource == null) {
return;
}
if (extraValidationStepHandler != null) {
context.addStep(operation, extraValidationStepHandler, Stage.MODEL);
}
final ModelNode model = resource.getModel();
final ImmutableManagementResourceRegistration resourceRegistration = context.getResourceRegistration();
final Set<String> attributeNames = resourceRegistration.getAttributeNames(PathAddress.EMPTY_ADDRESS);
for (final String attributeName : attributeNames) {
final boolean has = model.hasDefined(attributeName);
final AttributeAccess access = context.getResourceRegistration().getAttributeAccess(PathAddress.EMPTY_ADDRESS, attributeName);
if (access.getStorageType() != AttributeAccess.Storage.CONFIGURATION){
continue;
}
final AttributeDefinition attr = access.getAttributeDefinition();
if (!has && isRequired(attr, model)) {
attemptReadMissingAttributeValueFromHandler(context, access, attributeName, new ErrorHandler() {
@Override
public void throwError() throws OperationFailedException {
throw new OperationFailedException(ControllerLogger.ROOT_LOGGER.required(attributeName));
}});
}
if (!has) {
continue;
}
if (attr.getRequires() != null) {
for (final String required : attr.getRequires()) {
if (!model.hasDefined(required)) {
attemptReadMissingAttributeValueFromHandler(context, access, attributeName, new ErrorHandler() {
@Override
public void throwError() throws OperationFailedException {
throw ControllerLogger.ROOT_LOGGER.requiredAttributeNotSet(required, attr.getName());
}});
}
}
}
if (!isAllowed(attr, model)) {
//TODO should really use attemptReadMissingAttributeValueFromHandler() to make this totally good, but the
//overhead might be bigger than is worth at the moment since we would have to invoke the extra steps for
//every single attribute not found (and not found should be the normal).
String[] alts = attr.getAlternatives();
StringBuilder sb = null;
if (alts != null) {
for (String alt : alts) {
if (model.hasDefined(alt)) {
if (sb == null) {
sb = new StringBuilder();
} else {
sb.append(", ");
}
sb.append(alt);
}
}
}
throw new OperationFailedException(ControllerLogger.ROOT_LOGGER.invalidAttributeCombo(attributeName, sb));
}
handleObjectAttributes(model, attr, attributeName);
}
context.completeStep(OperationContext.RollbackHandler.NOOP_ROLLBACK_HANDLER);
}
private void attemptReadMissingAttributeValueFromHandler(final OperationContext context, final AttributeAccess attributeAccess,
final String attributeName, final ErrorHandler errorHandler) throws OperationFailedException {
OperationStepHandler handler = attributeAccess.getReadHandler();
if (handler == null) {
errorHandler.throwError();
} else {
final ModelNode readAttr = Util.getReadAttributeOperation(context.getCurrentAddress(), attributeName);
//Do a read-attribute as an immediate step
final ModelNode resultHolder = new ModelNode();
context.addStep(resultHolder, readAttr, handler, Stage.MODEL, true);
//Then check the read-attribute result in a later step and throw the error if it is not set
context.addStep(new OperationStepHandler() {
@Override
public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
if (!resultHolder.isDefined() && !resultHolder.hasDefined(RESULT)) {
errorHandler.throwError();
}
}
}, Stage.MODEL);
}
}
private void handleObjectAttributes(ModelNode model, AttributeDefinition attr, String absoluteParentName) throws OperationFailedException {
if (attr instanceof ObjectTypeAttributeDefinition) {
validateNestedAttributes(model.get(attr.getName()), attr, absoluteParentName);
} else if (attr instanceof ObjectListAttributeDefinition) {
AttributeDefinition valueType = ((ObjectListAttributeDefinition) attr).getValueType();
ModelNode list = model.get(attr.getName());
for (int i = 0; i < list.asInt(); i++) {
validateNestedAttributes(list.get(i), valueType, absoluteParentName + "[" + i + "]");
}
} else if (attr instanceof ObjectMapAttributeDefinition) {
AttributeDefinition valueType = ((ObjectMapAttributeDefinition) attr).getValueType();
ModelNode map = model.get(attr.getName());
for (String key : map.keys()) {
validateNestedAttributes(map.get(key), valueType, absoluteParentName + "." + key);
}
}
}
private void validateNestedAttributes(ModelNode subModel, AttributeDefinition attr, String absoluteParentName) throws OperationFailedException {
if (!subModel.isDefined()) {
return;
}
AttributeDefinition[] subAttrs = ((ObjectTypeAttributeDefinition) attr).getValueTypes();
for (AttributeDefinition subAttr : subAttrs) {
String subAttributeName = subAttr.getName();
String absoluteName = absoluteParentName + "." + subAttributeName;
if (!subModel.hasDefined(subAttributeName) && isRequired(subAttr, subModel)) {
throw new OperationFailedException(ControllerLogger.ROOT_LOGGER.required(subAttributeName));
}
if (!subModel.hasDefined(subAttributeName)) {
continue;
}
if (subAttr.getRequires() != null) {
for (final String required : subAttr.getRequires()) {
if (!subModel.hasDefined(required)) {
throw ControllerLogger.ROOT_LOGGER.requiredAttributeNotSet(absoluteParentName + "." + required, absoluteName);
}
}
}
if (!isAllowed(subAttr, subModel)) {
String[] alts = subAttr.getAlternatives();
StringBuilder sb = null;
if (alts != null) {
for (String alt : alts) {
if (subModel.hasDefined(alt)) {
if (sb == null) {
sb = new StringBuilder();
} else {
sb.append(", ");
}
sb.append(absoluteParentName + "." + alt);
}
}
}
throw new OperationFailedException(ControllerLogger.ROOT_LOGGER.invalidAttributeCombo(absoluteName, sb));
}
handleObjectAttributes(subModel, subAttr, absoluteName);
}
}
private boolean isRequired(final AttributeDefinition def, final ModelNode model) {
final boolean required = def.isRequired() && !def.isResourceOnly();
return required ? !hasAlternative(def.getAlternatives(), model) : required;
}
private boolean isAllowed(final AttributeDefinition def, final ModelNode model) {
final String[] alternatives = def.getAlternatives();
if (alternatives != null) {
for (final String alternative : alternatives) {
if (model.hasDefined(alternative)) {
return false;
}
}
}
return true;
}
private boolean hasAlternative(final String[] alternatives, ModelNode operationObject) {
if (alternatives != null) {
for (final String alternative : alternatives) {
if (operationObject.hasDefined(alternative)) {
return true;
}
}
}
return false;
}
private Resource loadResource(OperationContext context) {
final PathAddress address = context.getCurrentAddress();
PathAddress current = PathAddress.EMPTY_ADDRESS;
final ImmutableManagementResourceRegistration mrr = context.getRootResourceRegistration();
Resource resource = context.readResourceFromRoot(PathAddress.EMPTY_ADDRESS, false);
for (PathElement element : address) {
if (resource.isRuntime()){
return null;
}
current = current.append(element);
final ImmutableManagementResourceRegistration subMrr = mrr.getSubModel(current);
if (subMrr == null || subMrr.isRuntimeOnly() || subMrr.isRemote()) {
return null;
}
if (!resource.hasChild(element)) {
return null;
}
resource = context.readResourceFromRoot(current, false);
}
if (resource.isRuntime()) {
return null;
}
return resource;
}
private interface ErrorHandler {
void throwError() throws OperationFailedException;
}
}
| controller/src/main/java/org/jboss/as/controller/ValidateModelStepHandler.java | /*
* JBoss, Home of Professional Open Source
* Copyright 2014, JBoss Inc., and individual contributors as indicated
* by the @authors tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.as.controller;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.RESULT;
import java.util.Set;
import org.jboss.as.controller.OperationContext.Stage;
import org.jboss.as.controller.logging.ControllerLogger;
import org.jboss.as.controller.operations.common.Util;
import org.jboss.as.controller.registry.AttributeAccess;
import org.jboss.as.controller.registry.ImmutableManagementResourceRegistration;
import org.jboss.as.controller.registry.Resource;
import org.jboss.dmr.ModelNode;
/**
 * Validates the model of the targeted resource once an operation has modified it: every required
 * attribute must be defined, attributes named in another attribute's "requires" list must be
 * present, and attributes that are alternatives of each other must not be defined together.
 *
 * @author Tomaz Cerar (c) 2014 Red Hat Inc.
 */
class ValidateModelStepHandler implements OperationStepHandler {
    private static volatile ValidateModelStepHandler INSTANCE;
    private final OperationStepHandler extraValidationStepHandler;

    private ValidateModelStepHandler(OperationStepHandler extraValidationStepHandler) {
        this.extraValidationStepHandler = extraValidationStepHandler;
    }

    /**
     * Returns the shared handler instance, creating it lazily using double-checked locking on the
     * volatile field. NOTE(review): only the call that actually creates the instance has its
     * {@code extraValidationStepHandler} honoured; later callers receive the existing instance
     * regardless of the argument — confirm this is the intended contract.
     */
    static ValidateModelStepHandler getInstance(OperationStepHandler extraValidationStepHandler) {
        ValidateModelStepHandler handler = INSTANCE;
        if (handler == null) {
            synchronized (ValidateModelStepHandler.class) {
                handler = INSTANCE;
                if (handler == null) {
                    handler = new ValidateModelStepHandler(extraValidationStepHandler);
                    INSTANCE = handler;
                }
            }
        }
        return handler;
    }

    @Override
    public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
        final Resource resource = loadResource(context);
        if (resource == null) {
            // Runtime-only or remote resources are not part of the persistent model; nothing to do.
            return;
        }
        if (extraValidationStepHandler != null) {
            context.addStep(operation, extraValidationStepHandler, Stage.MODEL);
        }
        final ModelNode model = resource.getModel();
        final ImmutableManagementResourceRegistration registration = context.getResourceRegistration();
        for (final String attributeName : registration.getAttributeNames(PathAddress.EMPTY_ADDRESS)) {
            final AttributeAccess access = registration.getAttributeAccess(PathAddress.EMPTY_ADDRESS, attributeName);
            if (access.getStorageType() != AttributeAccess.Storage.CONFIGURATION) {
                // Only configuration attributes live in the persistent model.
                continue;
            }
            final boolean defined = model.hasDefined(attributeName);
            final AttributeDefinition attr = access.getAttributeDefinition();
            if (!defined && isRequired(attr, model)) {
                attemptReadMissingAttributeValueFromHandler(context, access, attributeName, () -> {
                    throw new OperationFailedException(ControllerLogger.ROOT_LOGGER.required(attributeName));
                });
            }
            if (!defined) {
                continue;
            }
            if (attr.getRequires() != null) {
                for (final String required : attr.getRequires()) {
                    if (!model.hasDefined(required)) {
                        attemptReadMissingAttributeValueFromHandler(context, access, attributeName, () -> {
                            throw ControllerLogger.ROOT_LOGGER.requiredAttributeNotSet(required, attr.getName());
                        });
                    }
                }
            }
            if (!isAllowed(attr, model)) {
                //TODO should really use attemptReadMissingAttributeValueFromHandler() to make this totally good, but the
                //overhead might be bigger than is worth at the moment since we would have to invoke the extra steps for
                //every single attribute not found (and not found should be the normal).
                throw new OperationFailedException(
                        ControllerLogger.ROOT_LOGGER.invalidAttributeCombo(attributeName, definedAlternatives(attr, model)));
            }
        }
        context.completeStep(OperationContext.RollbackHandler.NOOP_ROLLBACK_HANDLER);
    }

    /**
     * Lists the alternatives of {@code attr} that are defined in {@code model} as a comma
     * separated sequence, or returns {@code null} if none are defined.
     */
    private static StringBuilder definedAlternatives(final AttributeDefinition attr, final ModelNode model) {
        StringBuilder sb = null;
        final String[] alternatives = attr.getAlternatives();
        if (alternatives != null) {
            for (final String alternative : alternatives) {
                if (!model.hasDefined(alternative)) {
                    continue;
                }
                if (sb == null) {
                    sb = new StringBuilder();
                } else {
                    sb.append(", ");
                }
                sb.append(alternative);
            }
        }
        return sb;
    }

    /**
     * If the attribute has a read handler, runs a read-attribute for it as an immediate step and
     * afterwards verifies that a value was produced; otherwise (or if no value was produced)
     * invokes {@code errorHandler} to raise the validation failure.
     */
    private void attemptReadMissingAttributeValueFromHandler(final OperationContext context,
            final AttributeAccess attributeAccess, final String attributeName,
            final ErrorHandler errorHandler) throws OperationFailedException {
        final OperationStepHandler readHandler = attributeAccess.getReadHandler();
        if (readHandler == null) {
            errorHandler.throwError();
            return;
        }
        // Do a read-attribute as an immediate step...
        final ModelNode readAttr = Util.getReadAttributeOperation(context.getCurrentAddress(), attributeName);
        final ModelNode resultHolder = new ModelNode();
        context.addStep(resultHolder, readAttr, readHandler, Stage.MODEL, true);
        // ...then check its result in a later step, raising the error if no value was set.
        context.addStep((stepContext, stepOperation) -> {
            if (!resultHolder.isDefined() && !resultHolder.hasDefined(RESULT)) {
                errorHandler.throwError();
            }
        }, Stage.MODEL);
    }

    /** An attribute is required when it is mandatory, not resource-only and no alternative of it is defined. */
    private boolean isRequired(final AttributeDefinition def, final ModelNode model) {
        return def.isRequired() && !def.isResourceOnly() && !hasAlternative(def.getAlternatives(), model);
    }

    /** An attribute may only be defined while none of its alternatives is defined. */
    private boolean isAllowed(final AttributeDefinition def, final ModelNode model) {
        return !hasAlternative(def.getAlternatives(), model);
    }

    private boolean hasAlternative(final String[] alternatives, ModelNode operationObject) {
        if (alternatives == null) {
            return false;
        }
        for (final String alternative : alternatives) {
            if (operationObject.hasDefined(alternative)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Walks from the root down to the resource addressed by the operation, returning {@code null}
     * if any resource or registration along the path (or the target itself) is runtime-only or
     * remote, i.e. not part of the persistent configuration model.
     */
    private Resource loadResource(OperationContext context) {
        final PathAddress address = context.getCurrentAddress();
        final ImmutableManagementResourceRegistration rootRegistration = context.getRootResourceRegistration();
        Resource resource = context.readResourceFromRoot(PathAddress.EMPTY_ADDRESS, false);
        PathAddress visited = PathAddress.EMPTY_ADDRESS;
        for (PathElement element : address) {
            if (resource.isRuntime()) {
                return null;
            }
            visited = visited.append(element);
            final ImmutableManagementResourceRegistration childRegistration = rootRegistration.getSubModel(visited);
            if (childRegistration == null || childRegistration.isRuntimeOnly() || childRegistration.isRemote()) {
                return null;
            }
            if (!resource.hasChild(element)) {
                return null;
            }
            resource = context.readResourceFromRoot(visited, false);
        }
        return resource.isRuntime() ? null : resource;
    }

    @FunctionalInterface
    private interface ErrorHandler {
        void throwError() throws OperationFailedException;
    }
}
| WFCORE-2317: add validation for nested attributes
| controller/src/main/java/org/jboss/as/controller/ValidateModelStepHandler.java | WFCORE-2317: add validation for nested attributes |
|
Java | apache-2.0 | 56cb327f1e714fc7caf83d21ce82fcc3288b58e0 | 0 | google/ExoPlayer,ened/ExoPlayer,google/ExoPlayer,amzn/exoplayer-amazon-port,androidx/media,google/ExoPlayer,amzn/exoplayer-amazon-port,ened/ExoPlayer,amzn/exoplayer-amazon-port,androidx/media,ened/ExoPlayer,androidx/media | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import static java.lang.Math.max;
import static java.lang.Math.min;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.PlaybackParams;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.SystemClock;
import android.util.Pair;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Plays audio data. The implementation delegates to an {@link AudioTrack} and handles playback
* position smoothing, non-blocking writes and reconfiguration.
*
* <p>If tunneling mode is enabled, care must be taken that audio processors do not output buffers
* with a different duration than their input, and buffer processors must produce output
* corresponding to their last input immediately after that input is queued. This means that, for
* example, speed adjustment is not possible while using tunneling.
*/
public final class DefaultAudioSink implements AudioSink {
  /**
   * Thrown when the audio track has provided a spurious timestamp, if {@link
   * #failOnSpuriousAudioTimestamp} is set. Per that flag's documentation, this is intended for
   * testing and debugging purposes only.
   */
  public static final class InvalidAudioTrackTimestampException extends RuntimeException {

    /**
     * Creates a new invalid timestamp exception with the specified message.
     *
     * @param message The detail message for this exception.
     */
    // Private because instances are only created internally by the sink.
    private InvalidAudioTrackTimestampException(String message) {
      super(message);
    }
  }
  /**
   * Provides a chain of audio processors, which are used for any user-defined processing and
   * applying playback parameters (if supported). Because applying playback parameters can skip and
   * stretch/compress audio, the sink will query the chain for information on how to transform its
   * output position to map it onto a media position, via {@link #getMediaDuration(long)} and {@link
   * #getSkippedOutputFrameCount()}.
   *
   * <p>An instance passed to a sink must not be reused in other sinks.
   */
  public interface AudioProcessorChain {

    /**
     * Returns the fixed chain of audio processors that will process audio. This method is called
     * once during initialization, but audio processors may change state to become active/inactive
     * during playback.
     */
    AudioProcessor[] getAudioProcessors();

    /**
     * Configures audio processors to apply the specified playback parameters immediately, returning
     * the new playback parameters, which may differ from those passed in. Only called when
     * processors have no input pending.
     *
     * @param playbackParameters The playback parameters to try to apply.
     * @return The playback parameters that were actually applied.
     */
    PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters);

    /**
     * Configures audio processors to apply whether to skip silences immediately, returning the new
     * value. Only called when processors have no input pending.
     *
     * @param skipSilenceEnabled Whether silences should be skipped in the audio stream.
     * @return The new value.
     */
    boolean applySkipSilenceEnabled(boolean skipSilenceEnabled);

    /**
     * Scales the specified playout duration to take into account speedup due to audio processing,
     * returning an input media duration, in arbitrary units.
     */
    long getMediaDuration(long playoutDuration);

    /**
     * Returns the number of output audio frames skipped since the audio processors were last
     * flushed.
     */
    long getSkippedOutputFrameCount();
  }
/**
* The default audio processor chain, which applies a (possibly empty) chain of user-defined audio
* processors followed by {@link SilenceSkippingAudioProcessor} and {@link SonicAudioProcessor}.
*/
public static class DefaultAudioProcessorChain implements AudioProcessorChain {
private final AudioProcessor[] audioProcessors;
private final SilenceSkippingAudioProcessor silenceSkippingAudioProcessor;
private final SonicAudioProcessor sonicAudioProcessor;
/**
* Creates a new default chain of audio processors, with the user-defined {@code
* audioProcessors} applied before silence skipping and speed adjustment processors.
*/
public DefaultAudioProcessorChain(AudioProcessor... audioProcessors) {
this(audioProcessors, new SilenceSkippingAudioProcessor(), new SonicAudioProcessor());
}
/**
* Creates a new default chain of audio processors, with the user-defined {@code
* audioProcessors} applied before silence skipping and speed adjustment processors.
*/
public DefaultAudioProcessorChain(
AudioProcessor[] audioProcessors,
SilenceSkippingAudioProcessor silenceSkippingAudioProcessor,
SonicAudioProcessor sonicAudioProcessor) {
// The passed-in type may be more specialized than AudioProcessor[], so allocate a new array
// rather than using Arrays.copyOf.
this.audioProcessors = new AudioProcessor[audioProcessors.length + 2];
System.arraycopy(
/* src= */ audioProcessors,
/* srcPos= */ 0,
/* dest= */ this.audioProcessors,
/* destPos= */ 0,
/* length= */ audioProcessors.length);
this.silenceSkippingAudioProcessor = silenceSkippingAudioProcessor;
this.sonicAudioProcessor = sonicAudioProcessor;
this.audioProcessors[audioProcessors.length] = silenceSkippingAudioProcessor;
this.audioProcessors[audioProcessors.length + 1] = sonicAudioProcessor;
}
@Override
public AudioProcessor[] getAudioProcessors() {
return audioProcessors;
}
@Override
public PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters) {
float speed = sonicAudioProcessor.setSpeed(playbackParameters.speed);
float pitch = sonicAudioProcessor.setPitch(playbackParameters.pitch);
return new PlaybackParameters(speed, pitch);
}
@Override
public boolean applySkipSilenceEnabled(boolean skipSilenceEnabled) {
silenceSkippingAudioProcessor.setEnabled(skipSilenceEnabled);
return skipSilenceEnabled;
}
@Override
public long getMediaDuration(long playoutDuration) {
return sonicAudioProcessor.scaleDurationForSpeedup(playoutDuration);
}
@Override
public long getSkippedOutputFrameCount() {
return silenceSkippingAudioProcessor.getSkippedFrames();
}
}
  /** The default playback speed. */
  public static final float DEFAULT_PLAYBACK_SPEED = 1f;
  /** The minimum allowed playback speed. Lower values will be constrained to fall in range. */
  public static final float MIN_PLAYBACK_SPEED = 0.1f;
  /** The maximum allowed playback speed. Higher values will be constrained to fall in range. */
  public static final float MAX_PLAYBACK_SPEED = 8f;
  /** The minimum allowed pitch factor. Lower values will be constrained to fall in range. */
  public static final float MIN_PITCH = 0.1f;
  /** The maximum allowed pitch factor. Higher values will be constrained to fall in range. */
  public static final float MAX_PITCH = 8f;

  /** The default skip silence flag. */
  private static final boolean DEFAULT_SKIP_SILENCE = false;

  /** Output modes of the sink, decided during {@code configure}. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({OUTPUT_MODE_PCM, OUTPUT_MODE_OFFLOAD, OUTPUT_MODE_PASSTHROUGH})
  private @interface OutputMode {}

  /** The sink plays PCM audio produced by the audio processor chain. */
  private static final int OUTPUT_MODE_PCM = 0;
  /** The sink plays encoded audio in offload (API 29+). */
  private static final int OUTPUT_MODE_OFFLOAD = 1;
  /** The sink plays encoded audio via passthrough. */
  private static final int OUTPUT_MODE_PASSTHROUGH = 2;

  /** A minimum length for the {@link AudioTrack} buffer, in microseconds. */
  private static final long MIN_BUFFER_DURATION_US = 250_000;
  /** A maximum length for the {@link AudioTrack} buffer, in microseconds. */
  private static final long MAX_BUFFER_DURATION_US = 750_000;
  /** The length for passthrough {@link AudioTrack} buffers, in microseconds. */
  private static final long PASSTHROUGH_BUFFER_DURATION_US = 250_000;
  /** The length for offload {@link AudioTrack} buffers, in microseconds. */
  private static final long OFFLOAD_BUFFER_DURATION_US = 50_000_000;

  /**
   * A multiplication factor to apply to the minimum buffer size requested by the underlying {@link
   * AudioTrack}.
   */
  private static final int BUFFER_MULTIPLICATION_FACTOR = 4;

  /** To avoid underruns on some devices (e.g., Broadcom 7271), scale up the AC3 buffer duration. */
  private static final int AC3_BUFFER_MULTIPLICATION_FACTOR = 2;

  /**
   * Native error code equivalent of {@link AudioTrack#ERROR_DEAD_OBJECT} to workaround missing
   * error code translation on some devices.
   *
   * <p>On some devices, AudioTrack native error codes are not always converted to their SDK
   * equivalent.
   *
   * <p>For example: {@link AudioTrack#write(byte[], int, int)} can return -32 instead of {@link
   * AudioTrack#ERROR_DEAD_OBJECT}.
   */
  private static final int ERROR_NATIVE_DEAD_OBJECT = -32;

  /** Tag used when logging from this class. */
  private static final String TAG = "AudioTrack";

  /**
   * Whether to enable a workaround for an issue where an audio effect does not keep its session
   * active across releasing/initializing a new audio track, on platform builds where
   * {@link Util#SDK_INT} < 21.
   * <p>
   * The flag must be set before creating a player.
   */
  public static boolean enablePreV21AudioSessionWorkaround = false;

  /**
   * Whether to throw an {@link InvalidAudioTrackTimestampException} when a spurious timestamp is
   * reported from {@link AudioTrack#getTimestamp}.
   * <p>
   * The flag must be set before creating a player. Should be set to {@code true} for testing and
   * debugging purposes only.
   */
  public static boolean failOnSpuriousAudioTimestamp = false;
  // Immutable dependencies and feature flags, set in the constructor.
  @Nullable private final AudioCapabilities audioCapabilities;
  private final AudioProcessorChain audioProcessorChain;
  private final boolean enableFloatOutput;
  // Fixed processors applied to PCM input ahead of the user-supplied chain (see the constructor).
  private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
  private final TrimmingAudioProcessor trimmingAudioProcessor;
  // Candidate processor chains for integer PCM and float PCM output respectively.
  private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
  private final AudioProcessor[] toFloatPcmAvailableAudioProcessors;
  // Blocks initialization while a previous audio track is being released asynchronously (see
  // initializeAudioTrack()).
  private final ConditionVariable releasingConditionVariable;
  private final AudioTrackPositionTracker audioTrackPositionTracker;
  private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints;
  private final boolean enableAudioTrackPlaybackParams;
  private final boolean enableOffload;

  @MonotonicNonNull private StreamEventCallbackV29 offloadStreamEventCallbackV29;
  // Notified of sink events such as audio session ID changes (see initializeAudioTrack()).
  @Nullable private Listener listener;
  /**
   * Used to keep the audio session active on pre-V21 builds (see {@link #initializeAudioTrack()}).
   */
  @Nullable private AudioTrack keepSessionIdAudioTrack;

  // The configuration currently in use, and one waiting for pending data to drain before being
  // applied (see configure()).
  @Nullable private Configuration pendingConfiguration;
  @MonotonicNonNull private Configuration configuration;
  @Nullable private AudioTrack audioTrack;
  private AudioAttributes audioAttributes;

  // Media position parameters (speed/pitch and skip-silence) and their checkpointing state.
  @Nullable private MediaPositionParameters afterDrainParameters;
  private MediaPositionParameters mediaPositionParameters;
  private PlaybackParameters audioTrackPlaybackParameters;

  @Nullable private ByteBuffer avSyncHeader;
  private int bytesUntilNextAvSync;

  // Byte/frame counters for audio submitted to the sink and written to the track; written frames
  // feed position reporting (see getCurrentPositionUs()).
  private long submittedPcmBytes;
  private long submittedEncodedFrames;
  private long writtenPcmBytes;
  private long writtenEncodedFrames;
  private int framesPerEncodedSample;

  // When the init flag is set, the start media time is taken from the next queued buffer's
  // presentation time (see handleBuffer()).
  private boolean startMediaTimeUsNeedsSync;
  private boolean startMediaTimeUsNeedsInit;
  private long startMediaTimeUs;

  private float volume;

  // The processors currently active, and their most recent output buffers (parallel arrays).
  private AudioProcessor[] activeAudioProcessors;
  private ByteBuffer[] outputBuffers;
  // Input/output buffers in flight; presumably used by the write path — see handleBuffer().
  @Nullable private ByteBuffer inputBuffer;
  private int inputBufferAccessUnitCount;
  @Nullable private ByteBuffer outputBuffer;
  @MonotonicNonNull private byte[] preV21OutputBuffer;
  private int preV21OutputBufferOffset;
  private int drainingAudioProcessorIndex;
  private boolean handledEndOfStream;
  private boolean stoppedAudioTrack;

  // Whether play() has been called; playback starts once the track is initialized.
  private boolean playing;
  private int audioSessionId;
  private AuxEffectInfo auxEffectInfo;
  private boolean tunneling;
  private long lastFeedElapsedRealtimeMs;
  // When set, getFormatSupport()/configure() will not select offload until the next configure.
  private boolean offloadDisabledUntilNextConfiguration;
  private boolean isWaitingForOffloadEndOfStreamHandled;
  /**
   * Creates a new default audio sink.
   *
   * <p>Equivalent to the three-argument constructor with float output disabled.
   *
   * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
   *     default capabilities (no encoded audio passthrough support) should be assumed.
   * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
   *     output. May be empty.
   */
  public DefaultAudioSink(
      @Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) {
    this(audioCapabilities, audioProcessors, /* enableFloatOutput= */ false);
  }

  /**
   * Creates a new default audio sink, optionally using float output for high resolution PCM.
   *
   * <p>Wraps the processors in a {@link DefaultAudioProcessorChain} and leaves audio track
   * playback parameters and offload disabled.
   *
   * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
   *     default capabilities (no encoded audio passthrough support) should be assumed.
   * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
   *     output. May be empty.
   * @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
   *     output will be used if the input is 32-bit float, and also if the input is high resolution
   *     (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not
   *     be available when float output is in use.
   */
  public DefaultAudioSink(
      @Nullable AudioCapabilities audioCapabilities,
      AudioProcessor[] audioProcessors,
      boolean enableFloatOutput) {
    this(
        audioCapabilities,
        new DefaultAudioProcessorChain(audioProcessors),
        enableFloatOutput,
        /* enableAudioTrackPlaybackParams= */ false,
        /* enableOffload= */ false);
  }
/**
* Creates a new default audio sink, optionally using float output for high resolution PCM and
* with the specified {@code audioProcessorChain}.
*
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback
* parameters adjustments. The instance passed in must not be reused in other sinks.
* @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
* output will be used if the input is 32-bit float, and also if the input is high resolution
* (24-bit or 32-bit) integer PCM. Float output is supported from API level 21. Audio
* processing (for example, speed adjustment) will not be available when float output is in
* use.
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
* @param enableOffload Whether to enable audio offload. If an audio format can be both played
* with offload and encoded audio passthrough, it will be played in offload. Audio offload is
* supported from API level 29. Most Android devices can only support one offload {@link
* android.media.AudioTrack} at a time and can invalidate it at any time. Thus an app can
* never be guaranteed that it will be able to play in offload. Audio processing (for example,
* speed adjustment) will not be available when offload is in use.
*/
public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities,
AudioProcessorChain audioProcessorChain,
boolean enableFloatOutput,
boolean enableAudioTrackPlaybackParams,
boolean enableOffload) {
this.audioCapabilities = audioCapabilities;
this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
this.enableFloatOutput = Util.SDK_INT >= 21 && enableFloatOutput;
this.enableAudioTrackPlaybackParams = Util.SDK_INT >= 23 && enableAudioTrackPlaybackParams;
this.enableOffload = Util.SDK_INT >= 29 && enableOffload;
releasingConditionVariable = new ConditionVariable(true);
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
trimmingAudioProcessor = new TrimmingAudioProcessor();
ArrayList<AudioProcessor> toIntPcmAudioProcessors = new ArrayList<>();
Collections.addAll(
toIntPcmAudioProcessors,
new ResamplingAudioProcessor(),
channelMappingAudioProcessor,
trimmingAudioProcessor);
Collections.addAll(toIntPcmAudioProcessors, audioProcessorChain.getAudioProcessors());
toIntPcmAvailableAudioProcessors = toIntPcmAudioProcessors.toArray(new AudioProcessor[0]);
toFloatPcmAvailableAudioProcessors = new AudioProcessor[] {new FloatResamplingAudioProcessor()};
volume = 1f;
audioAttributes = AudioAttributes.DEFAULT;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
auxEffectInfo = new AuxEffectInfo(AuxEffectInfo.NO_AUX_EFFECT_ID, 0f);
mediaPositionParameters =
new MediaPositionParameters(
PlaybackParameters.DEFAULT,
DEFAULT_SKIP_SILENCE,
/* mediaTimeUs= */ 0,
/* audioTrackPositionUs= */ 0);
audioTrackPlaybackParameters = PlaybackParameters.DEFAULT;
drainingAudioProcessorIndex = C.INDEX_UNSET;
activeAudioProcessors = new AudioProcessor[0];
outputBuffers = new ByteBuffer[0];
mediaPositionParametersCheckpoints = new ArrayDeque<>();
}
  // AudioSink implementation.

  @Override
  public void setListener(Listener listener) {
    // The listener is notified of sink events such as audio session ID changes (see
    // initializeAudioTrack()).
    this.listener = listener;
  }
  @Override
  public boolean supportsFormat(Format format) {
    // Supported either directly or via transcoding to 16-bit PCM (see getFormatSupport()).
    return getFormatSupport(format) != SINK_FORMAT_UNSUPPORTED;
  }
@Override
@SinkFormatSupport
public int getFormatSupport(Format format) {
if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) {
if (!Util.isEncodingLinearPcm(format.pcmEncoding)) {
Log.w(TAG, "Invalid PCM encoding: " + format.pcmEncoding);
return SINK_FORMAT_UNSUPPORTED;
}
if (format.pcmEncoding == C.ENCODING_PCM_16BIT
|| (enableFloatOutput && format.pcmEncoding == C.ENCODING_PCM_FLOAT)) {
return SINK_FORMAT_SUPPORTED_DIRECTLY;
}
// We can resample all linear PCM encodings to 16-bit integer PCM, which AudioTrack is
// guaranteed to support.
return SINK_FORMAT_SUPPORTED_WITH_TRANSCODING;
}
if (enableOffload
&& !offloadDisabledUntilNextConfiguration
&& isOffloadedPlaybackSupported(format, audioAttributes)) {
return SINK_FORMAT_SUPPORTED_DIRECTLY;
}
if (isPassthroughPlaybackSupported(format, audioCapabilities)) {
return SINK_FORMAT_SUPPORTED_DIRECTLY;
}
return SINK_FORMAT_UNSUPPORTED;
}
  @Override
  public long getCurrentPositionUs(boolean sourceEnded) {
    // No meaningful position is available until the track exists and the start media time has
    // been initialized from the first queued buffer.
    if (!isAudioTrackInitialized() || startMediaTimeUsNeedsInit) {
      return CURRENT_POSITION_NOT_SET;
    }
    long positionUs = audioTrackPositionTracker.getCurrentPositionUs(sourceEnded);
    // Never report a position beyond the duration of the audio written so far.
    positionUs = min(positionUs, configuration.framesToDurationUs(getWrittenFrames()));
    // Map the track position onto a media position, accounting for speedup and skipped frames.
    return applySkipping(applyMediaPositionParameters(positionUs));
  }
  /**
   * Configures (or reconfigures) the sink for {@code inputFormat}, selecting the output mode
   * (PCM, offload or passthrough) and the corresponding output encoding, sample rate and channel
   * config. If an audio track is already initialized, the new configuration is stored as pending
   * and applied once queued data has drained (see {@code handleBuffer}).
   */
  @Override
  public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels)
      throws ConfigurationException {
    int inputPcmFrameSize;
    @Nullable AudioProcessor[] availableAudioProcessors;
    boolean canApplyPlaybackParameters;
    @OutputMode int outputMode;
    @C.Encoding int outputEncoding;
    int outputSampleRate;
    int outputChannelConfig;
    int outputPcmFrameSize;
    if (MimeTypes.AUDIO_RAW.equals(inputFormat.sampleMimeType)) {
      // PCM input: run the format through the processor chain to determine the output format.
      Assertions.checkArgument(Util.isEncodingLinearPcm(inputFormat.pcmEncoding));
      inputPcmFrameSize = Util.getPcmFrameSize(inputFormat.pcmEncoding, inputFormat.channelCount);
      boolean useFloatOutput =
          enableFloatOutput && Util.isEncodingHighResolutionPcm(inputFormat.pcmEncoding);
      availableAudioProcessors =
          useFloatOutput ? toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors;
      // Speed/pitch adjustment is not available for float output.
      canApplyPlaybackParameters = !useFloatOutput;
      trimmingAudioProcessor.setTrimFrameCount(
          inputFormat.encoderDelay, inputFormat.encoderPadding);
      if (Util.SDK_INT < 21 && inputFormat.channelCount == 8 && outputChannels == null) {
        // AudioTrack doesn't support 8 channel output before Android L. Discard the last two (side)
        // channels to give a 6 channel stream that is supported.
        outputChannels = new int[6];
        for (int i = 0; i < outputChannels.length; i++) {
          outputChannels[i] = i;
        }
      }
      channelMappingAudioProcessor.setChannelMap(outputChannels);
      // Propagate the format through each processor in turn; inactive processors leave the
      // format unchanged.
      AudioProcessor.AudioFormat outputFormat =
          new AudioProcessor.AudioFormat(
              inputFormat.sampleRate, inputFormat.channelCount, inputFormat.pcmEncoding);
      for (AudioProcessor audioProcessor : availableAudioProcessors) {
        try {
          AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat);
          if (audioProcessor.isActive()) {
            outputFormat = nextFormat;
          }
        } catch (UnhandledAudioFormatException e) {
          throw new ConfigurationException(e);
        }
      }
      outputMode = OUTPUT_MODE_PCM;
      outputEncoding = outputFormat.encoding;
      outputSampleRate = outputFormat.sampleRate;
      outputChannelConfig = Util.getAudioTrackChannelConfig(outputFormat.channelCount);
      outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, outputFormat.channelCount);
    } else {
      // Encoded input: no audio processing is applied; play in offload if possible, otherwise
      // fall back to passthrough.
      inputPcmFrameSize = C.LENGTH_UNSET;
      availableAudioProcessors = new AudioProcessor[0];
      canApplyPlaybackParameters = false;
      outputSampleRate = inputFormat.sampleRate;
      outputPcmFrameSize = C.LENGTH_UNSET;
      if (enableOffload && isOffloadedPlaybackSupported(inputFormat, audioAttributes)) {
        outputMode = OUTPUT_MODE_OFFLOAD;
        outputEncoding =
            MimeTypes.getEncoding(
                Assertions.checkNotNull(inputFormat.sampleMimeType), inputFormat.codecs);
        outputChannelConfig = Util.getAudioTrackChannelConfig(inputFormat.channelCount);
      } else {
        outputMode = OUTPUT_MODE_PASSTHROUGH;
        @Nullable
        Pair<Integer, Integer> encodingAndChannelConfig =
            getEncodingAndChannelConfigForPassthrough(inputFormat, audioCapabilities);
        if (encodingAndChannelConfig == null) {
          throw new ConfigurationException("Unable to configure passthrough for: " + inputFormat);
        }
        outputEncoding = encodingAndChannelConfig.first;
        outputChannelConfig = encodingAndChannelConfig.second;
      }
    }
    if (outputEncoding == C.ENCODING_INVALID) {
      throw new ConfigurationException(
          "Invalid output encoding (mode=" + outputMode + ") for: " + inputFormat);
    }
    if (outputChannelConfig == AudioFormat.CHANNEL_INVALID) {
      throw new ConfigurationException(
          "Invalid output channel config (mode=" + outputMode + ") for: " + inputFormat);
    }
    // A new configure call means any earlier decision to avoid offload no longer applies.
    offloadDisabledUntilNextConfiguration = false;
    Configuration pendingConfiguration =
        new Configuration(
            inputFormat,
            inputPcmFrameSize,
            outputMode,
            outputPcmFrameSize,
            outputSampleRate,
            outputChannelConfig,
            outputEncoding,
            specifiedBufferSize,
            enableAudioTrackPlaybackParams,
            canApplyPlaybackParameters,
            availableAudioProcessors);
    if (isAudioTrackInitialized()) {
      // Defer applying the new configuration until pending data has drained.
      this.pendingConfiguration = pendingConfiguration;
    } else {
      configuration = pendingConfiguration;
    }
  }
private void setupAudioProcessors() {
AudioProcessor[] audioProcessors = configuration.availableAudioProcessors;
ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (AudioProcessor audioProcessor : audioProcessors) {
if (audioProcessor.isActive()) {
newAudioProcessors.add(audioProcessor);
} else {
audioProcessor.flush();
}
}
int count = newAudioProcessors.size();
activeAudioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
outputBuffers = new ByteBuffer[count];
flushAudioProcessors();
}
private void flushAudioProcessors() {
for (int i = 0; i < activeAudioProcessors.length; i++) {
AudioProcessor audioProcessor = activeAudioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
}
}
  /**
   * Builds and configures the underlying {@link AudioTrack} for the current configuration,
   * wiring up offload callbacks, session ID handling, position tracking, volume and aux effects.
   */
  private void initializeAudioTrack() throws InitializationException {
    // If we're asynchronously releasing a previous audio track then we block until it has been
    // released. This guarantees that we cannot end up in a state where we have multiple audio
    // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
    // the shared memory that's available for audio track buffers. This would in turn cause the
    // initialization of the audio track to fail.
    releasingConditionVariable.block();

    audioTrack = buildAudioTrack();
    if (isOffloadedPlayback(audioTrack)) {
      // Offload tracks need a stream event callback and gapless delay/padding metadata.
      registerStreamEventCallbackV29(audioTrack);
      audioTrack.setOffloadDelayPadding(
          configuration.inputFormat.encoderDelay, configuration.inputFormat.encoderPadding);
    }
    int audioSessionId = audioTrack.getAudioSessionId();
    if (enablePreV21AudioSessionWorkaround) {
      if (Util.SDK_INT < 21) {
        // The workaround creates an audio track with a two byte buffer on the same session, and
        // does not release it until this object is released, which keeps the session active.
        if (keepSessionIdAudioTrack != null
            && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
          releaseKeepSessionIdAudioTrack();
        }
        if (keepSessionIdAudioTrack == null) {
          keepSessionIdAudioTrack = initializeKeepSessionIdAudioTrack(audioSessionId);
        }
      }
    }
    // Notify the listener if the platform assigned a different session ID than last time.
    if (this.audioSessionId != audioSessionId) {
      this.audioSessionId = audioSessionId;
      if (listener != null) {
        listener.onAudioSessionId(audioSessionId);
      }
    }

    audioTrackPositionTracker.setAudioTrack(
        audioTrack,
        /* isPassthrough= */ configuration.outputMode == OUTPUT_MODE_PASSTHROUGH,
        configuration.outputEncoding,
        configuration.outputPcmFrameSize,
        configuration.bufferSize);
    setVolumeInternal();

    if (auxEffectInfo.effectId != AuxEffectInfo.NO_AUX_EFFECT_ID) {
      audioTrack.attachAuxEffect(auxEffectInfo.effectId);
      audioTrack.setAuxEffectSendLevel(auxEffectInfo.sendLevel);
    }

    // The start media time must be (re)derived from the next queued buffer (see handleBuffer()).
    startMediaTimeUsNeedsInit = true;
  }
@Override
public void play() {
playing = true;
if (isAudioTrackInitialized()) {
audioTrackPositionTracker.start();
audioTrack.play();
}
}
  @Override
  public void handleDiscontinuity() {
    // Force resynchronization of the start media time after a skipped buffer.
    startMediaTimeUsNeedsSync = true;
  }
/**
 * Attempts to process {@code buffer}. Applies any pending configuration change first, lazily
 * initializes the AudioTrack, (re)synchronizes the media start time when required, then feeds the
 * data through the audio processor chain towards the track. Returns whether the buffer was fully
 * consumed; callers must resubmit the same buffer until it returns {@code true}.
 */
@Override
@SuppressWarnings("ReferenceEquality")
public boolean handleBuffer(
    ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount)
    throws InitializationException, WriteException {
  // A partially consumed buffer must be resubmitted as the same instance.
  Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
  if (pendingConfiguration != null) {
    if (!drainToEndOfStream()) {
      // There's still pending data in audio processors to write to the track.
      return false;
    } else if (!pendingConfiguration.canReuseAudioTrack(configuration)) {
      playPendingData();
      if (hasPendingData()) {
        // We're waiting for playout on the current audio track to finish.
        return false;
      }
      flush();
    } else {
      // The current audio track can be reused for the new configuration.
      configuration = pendingConfiguration;
      pendingConfiguration = null;
      if (isOffloadedPlayback(audioTrack)) {
        audioTrack.setOffloadEndOfStream();
        audioTrack.setOffloadDelayPadding(
            configuration.inputFormat.encoderDelay, configuration.inputFormat.encoderPadding);
        isWaitingForOffloadEndOfStreamHandled = true;
      }
    }
    // Re-apply playback parameters.
    applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
  }
  if (!isAudioTrackInitialized()) {
    initializeAudioTrack();
  }
  if (startMediaTimeUsNeedsInit) {
    // First buffer after initialization/flush: anchor the media start time here.
    startMediaTimeUs = max(0, presentationTimeUs);
    startMediaTimeUsNeedsSync = false;
    startMediaTimeUsNeedsInit = false;
    if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
      setAudioTrackPlaybackParametersV23(audioTrackPlaybackParameters);
    }
    applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
    if (playing) {
      play();
    }
  }
  if (!audioTrackPositionTracker.mayHandleBuffer(getWrittenFrames())) {
    return false;
  }
  if (inputBuffer == null) {
    // We are seeing this buffer for the first time.
    Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN);
    if (!buffer.hasRemaining()) {
      // The buffer is empty.
      return true;
    }
    if (configuration.outputMode != OUTPUT_MODE_PCM && framesPerEncodedSample == 0) {
      // If this is the first encoded sample, calculate the sample size in frames.
      framesPerEncodedSample = getFramesPerEncodedSample(configuration.outputEncoding, buffer);
      if (framesPerEncodedSample == 0) {
        // We still don't know the number of frames per sample, so drop the buffer.
        // For TrueHD this can occur after some seek operations, as not every sample starts with
        // a syncframe header. If we chunked samples together so the extracted samples always
        // started with a syncframe header, the chunks would be too large.
        return true;
      }
    }
    if (afterDrainParameters != null) {
      if (!drainToEndOfStream()) {
        // Don't process any more input until draining completes.
        return false;
      }
      applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
      afterDrainParameters = null;
    }
    // Check that presentationTimeUs is consistent with the expected value.
    long expectedPresentationTimeUs =
        startMediaTimeUs
            + configuration.inputFramesToDurationUs(
                getSubmittedFrames() - trimmingAudioProcessor.getTrimmedFrameCount());
    if (!startMediaTimeUsNeedsSync
        && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) {
      Log.e(
          TAG,
          "Discontinuity detected [expected "
              + expectedPresentationTimeUs
              + ", got "
              + presentationTimeUs
              + "]");
      startMediaTimeUsNeedsSync = true;
    }
    if (startMediaTimeUsNeedsSync) {
      if (!drainToEndOfStream()) {
        // Don't update timing until pending AudioProcessor buffers are completely drained.
        return false;
      }
      // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the
      // number of bytes submitted.
      long adjustmentUs = presentationTimeUs - expectedPresentationTimeUs;
      startMediaTimeUs += adjustmentUs;
      startMediaTimeUsNeedsSync = false;
      // Re-apply playback parameters because the startMediaTimeUs changed.
      applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
      if (listener != null && adjustmentUs != 0) {
        listener.onPositionDiscontinuity();
      }
    }
    if (configuration.outputMode == OUTPUT_MODE_PCM) {
      submittedPcmBytes += buffer.remaining();
    } else {
      submittedEncodedFrames += framesPerEncodedSample * encodedAccessUnitCount;
    }
    inputBuffer = buffer;
    inputBufferAccessUnitCount = encodedAccessUnitCount;
  }
  processBuffers(presentationTimeUs);
  if (!inputBuffer.hasRemaining()) {
    // Fully consumed; clear the retained reference so the next buffer is treated as new.
    inputBuffer = null;
    inputBufferAccessUnitCount = 0;
    return true;
  }
  if (audioTrackPositionTracker.isStalled(getWrittenFrames())) {
    Log.w(TAG, "Resetting stalled audio track");
    flush();
    return true;
  }
  return false;
}
/**
 * Builds an {@link AudioTrack} from the current configuration. On failure, offload is disabled
 * for subsequent configurations, the listener (if any) is notified, and the exception propagates.
 */
private AudioTrack buildAudioTrack() throws InitializationException {
  try {
    return Assertions.checkNotNull(configuration)
        .buildAudioTrack(tunneling, audioAttributes, audioSessionId);
  } catch (InitializationException e) {
    // Creation failed; offload may be the cause, so avoid it until the next configuration.
    maybeDisableOffload();
    if (listener == null) {
      throw e;
    }
    listener.onAudioSinkError(e);
    throw e;
  }
}
@RequiresApi(29)
private void registerStreamEventCallbackV29(AudioTrack audioTrack) {
  // Lazily created so instantiation happens on the current (playback) thread, where stream
  // event callbacks must be received; the sink's constructor may run on a different thread.
  StreamEventCallbackV29 callback = offloadStreamEventCallbackV29;
  if (callback == null) {
    callback = new StreamEventCallbackV29();
    offloadStreamEventCallbackV29 = callback;
  }
  callback.register(audioTrack);
}
/**
 * Drains data through the chain of active audio processors and ultimately to the AudioTrack.
 *
 * <p>Index {@code count} represents the AudioTrack itself; an index {@code i < count} is
 * processor {@code i}, whose input is the previous processor's output ({@link #inputBuffer} for
 * index 0). The loop moves forward when a stage produces output and backwards to fetch more
 * input, stopping when a stage can make no progress.
 *
 * @param avSyncPresentationTimeUs Timestamp used for tunneled AV sync writes, or
 *     {@code C.TIME_UNSET} when not applicable.
 */
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
  int count = activeAudioProcessors.length;
  int index = count;
  while (index >= 0) {
    ByteBuffer input = index > 0 ? outputBuffers[index - 1]
        : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
    if (index == count) {
      // Past the last processor: write directly to the AudioTrack.
      writeBuffer(input, avSyncPresentationTimeUs);
    } else {
      AudioProcessor audioProcessor = activeAudioProcessors[index];
      audioProcessor.queueInput(input);
      ByteBuffer output = audioProcessor.getOutput();
      outputBuffers[index] = output;
      if (output.hasRemaining()) {
        // Handle the output as input to the next audio processor or the AudioTrack.
        index++;
        continue;
      }
    }
    if (input.hasRemaining()) {
      // The input wasn't consumed and no output was produced, so give up for now.
      return;
    }
    // Get more input from upstream.
    index--;
  }
}
/**
 * Writes as much of {@code buffer} as possible to the AudioTrack without blocking.
 *
 * <p>A buffer that is not fully written is retained in {@link #outputBuffer}; the same instance
 * must be passed again on subsequent calls until it is fully consumed.
 *
 * @param buffer The data to write.
 * @param avSyncPresentationTimeUs Presentation time for tunneled AV sync writes, or
 *     {@code C.TIME_UNSET} when not tunneling.
 * @throws WriteException If the underlying AudioTrack write returns an error.
 */
@SuppressWarnings("ReferenceEquality")
private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throws WriteException {
  if (!buffer.hasRemaining()) {
    return;
  }
  if (outputBuffer != null) {
    Assertions.checkArgument(outputBuffer == buffer);
  } else {
    outputBuffer = buffer;
    if (Util.SDK_INT < 21) {
      // Pre-21 AudioTrack.write takes a byte[]; copy the buffer contents once and track an
      // offset as data is written across calls.
      int bytesRemaining = buffer.remaining();
      if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
        preV21OutputBuffer = new byte[bytesRemaining];
      }
      int originalPosition = buffer.position();
      buffer.get(preV21OutputBuffer, 0, bytesRemaining);
      buffer.position(originalPosition);
      preV21OutputBufferOffset = 0;
    }
  }
  int bytesRemaining = buffer.remaining();
  int bytesWrittenOrError = 0; // Error if negative
  if (Util.SDK_INT < 21) { // outputMode == OUTPUT_MODE_PCM.
    // Work out how many bytes we can write without the risk of blocking.
    int bytesToWrite = audioTrackPositionTracker.getAvailableBufferSize(writtenPcmBytes);
    if (bytesToWrite > 0) {
      bytesToWrite = min(bytesRemaining, bytesToWrite);
      bytesWrittenOrError =
          audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite);
      if (bytesWrittenOrError > 0) { // No error
        preV21OutputBufferOffset += bytesWrittenOrError;
        buffer.position(buffer.position() + bytesWrittenOrError);
      }
    }
  } else if (tunneling) {
    Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET);
    bytesWrittenOrError =
        writeNonBlockingWithAvSyncV21(
            audioTrack, buffer, bytesRemaining, avSyncPresentationTimeUs);
  } else {
    bytesWrittenOrError = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
  }
  lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
  if (bytesWrittenOrError < 0) {
    // Negative return values from AudioTrack.write are error codes.
    int error = bytesWrittenOrError;
    boolean isRecoverable = isAudioTrackDeadObject(error);
    if (isRecoverable) {
      maybeDisableOffload();
    }
    WriteException e = new WriteException(error, isRecoverable);
    if (listener != null) {
      listener.onAudioSinkError(e);
    }
    throw e;
  }
  int bytesWritten = bytesWrittenOrError;
  if (isOffloadedPlayback(audioTrack)) {
    // After calling AudioTrack.setOffloadEndOfStream, the AudioTrack internally stops and
    // restarts during which AudioTrack.write will return 0. This situation must be detected to
    // prevent reporting the buffer as full even though it is not which could lead ExoPlayer to
    // sleep forever waiting for a onDataRequest that will never come.
    if (writtenEncodedFrames > 0) {
      isWaitingForOffloadEndOfStreamHandled = false;
    }
    // Consider the offload buffer as full if the AudioTrack is playing and AudioTrack.write could
    // not write all the data provided to it. This relies on the assumption that AudioTrack.write
    // always writes as much as possible.
    if (playing
        && listener != null
        && bytesWritten < bytesRemaining
        && !isWaitingForOffloadEndOfStreamHandled) {
      long pendingDurationMs =
          audioTrackPositionTracker.getPendingBufferDurationMs(writtenEncodedFrames);
      listener.onOffloadBufferFull(pendingDurationMs);
    }
  }
  if (configuration.outputMode == OUTPUT_MODE_PCM) {
    writtenPcmBytes += bytesWritten;
  }
  if (bytesWritten == bytesRemaining) {
    if (configuration.outputMode != OUTPUT_MODE_PCM) {
      // When playing non-PCM, the inputBuffer is never processed, thus the last inputBuffer
      // must be the current input buffer.
      Assertions.checkState(buffer == inputBuffer);
      writtenEncodedFrames += framesPerEncodedSample * inputBufferAccessUnitCount;
    }
    outputBuffer = null;
  }
}
// Drains all queued data to the track and signals end of stream to it, once per stream.
@Override
public void playToEndOfStream() throws WriteException {
  if (handledEndOfStream) {
    return;
  }
  if (isAudioTrackInitialized() && drainToEndOfStream()) {
    playPendingData();
    handledEndOfStream = true;
  }
}
private void maybeDisableOffload() {
  if (configuration.outputModeIsOffload()) {
    // Offload was requested, but may not be available. There are cases when this can occur even
    // if AudioManager.isOffloadedPlaybackSupported returned true. For example, due to use of an
    // AudioPlaybackCaptureConfiguration. Disable offload until the sink is next configured.
    offloadDisabledUntilNextConfiguration = true;
  }
}
// Returns whether the given AudioTrack.write error code indicates a dead audio track object.
private static boolean isAudioTrackDeadObject(int status) {
  if (status == ERROR_NATIVE_DEAD_OBJECT) {
    return true;
  }
  // AudioTrack.ERROR_DEAD_OBJECT is only defined from API 24.
  return Util.SDK_INT >= 24 && status == AudioTrack.ERROR_DEAD_OBJECT;
}
/**
 * Queues end of stream into the processor chain and drains all processors and any retained
 * output to the track.
 *
 * @return Whether draining completed. When {@code false}, draining is still in progress and this
 *     method must be called again; {@link #drainingAudioProcessorIndex} records the progress.
 */
private boolean drainToEndOfStream() throws WriteException {
  boolean audioProcessorNeedsEndOfStream = false;
  if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
    // Start a new drain from the first active processor.
    drainingAudioProcessorIndex = 0;
    audioProcessorNeedsEndOfStream = true;
  }
  while (drainingAudioProcessorIndex < activeAudioProcessors.length) {
    AudioProcessor audioProcessor = activeAudioProcessors[drainingAudioProcessorIndex];
    if (audioProcessorNeedsEndOfStream) {
      audioProcessor.queueEndOfStream();
    }
    processBuffers(C.TIME_UNSET);
    if (!audioProcessor.isEnded()) {
      // This processor still has data to emit; come back later.
      return false;
    }
    // This processor is done, so the next processor also needs end of stream queued.
    audioProcessorNeedsEndOfStream = true;
    drainingAudioProcessorIndex++;
  }
  // Finish writing any remaining output to the track.
  if (outputBuffer != null) {
    writeBuffer(outputBuffer, C.TIME_UNSET);
    if (outputBuffer != null) {
      // The track could not yet accept all remaining data.
      return false;
    }
  }
  drainingAudioProcessorIndex = C.INDEX_UNSET;
  return true;
}
@Override
public boolean isEnded() {
  // An uninitialized track is trivially ended.
  if (!isAudioTrackInitialized()) {
    return true;
  }
  return handledEndOfStream && !hasPendingData();
}
@Override
public boolean hasPendingData() {
  if (!isAudioTrackInitialized()) {
    return false;
  }
  // Delegate to the position tracker, which knows how much has actually played out.
  return audioTrackPositionTracker.hasPendingData(getWrittenFrames());
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
  // Clamp speed and pitch to the supported ranges before applying.
  float constrainedSpeed =
      Util.constrainValue(playbackParameters.speed, MIN_PLAYBACK_SPEED, MAX_PLAYBACK_SPEED);
  float constrainedPitch = Util.constrainValue(playbackParameters.pitch, MIN_PITCH, MAX_PITCH);
  PlaybackParameters constrainedParameters =
      new PlaybackParameters(constrainedSpeed, constrainedPitch);
  if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
    // Apply directly on the platform AudioTrack.
    setAudioTrackPlaybackParametersV23(constrainedParameters);
  } else {
    // Apply via the audio processor chain.
    setAudioProcessorPlaybackParametersAndSkipSilence(
        constrainedParameters, getSkipSilenceEnabled());
  }
}
@Override
public PlaybackParameters getPlaybackParameters() {
  if (enableAudioTrackPlaybackParams) {
    return audioTrackPlaybackParameters;
  }
  return getAudioProcessorPlaybackParameters();
}
@Override
public void setSkipSilenceEnabled(boolean skipSilenceEnabled) {
  // Only the skip-silence flag changes; the playback parameters are kept as-is.
  PlaybackParameters currentParameters = getAudioProcessorPlaybackParameters();
  setAudioProcessorPlaybackParametersAndSkipSilence(currentParameters, skipSilenceEnabled);
}
@Override
public boolean getSkipSilenceEnabled() {
  MediaPositionParameters parameters = getMediaPositionParameters();
  return parameters.skipSilence;
}
@Override
public void setAudioAttributes(AudioAttributes audioAttributes) {
  if (!this.audioAttributes.equals(audioAttributes)) {
    this.audioAttributes = audioAttributes;
    if (!tunneling) {
      // New attributes require a new AudioTrack; drop the session id along with the track.
      flush();
      audioSessionId = C.AUDIO_SESSION_ID_UNSET;
    }
    // In tunneling mode the audio attributes are ignored, so no reset is needed.
  }
}
@Override
public void setAudioSessionId(int audioSessionId) {
  if (this.audioSessionId == audioSessionId) {
    return;
  }
  this.audioSessionId = audioSessionId;
  // The session id is fixed at track creation, so the current track must be rebuilt.
  flush();
}
@Override
public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) {
  if (this.auxEffectInfo.equals(auxEffectInfo)) {
    return;
  }
  if (audioTrack != null) {
    if (this.auxEffectInfo.effectId != auxEffectInfo.effectId) {
      // Attaching a new effect replaces the previous one on the track.
      audioTrack.attachAuxEffect(auxEffectInfo.effectId);
    }
    if (auxEffectInfo.effectId != AuxEffectInfo.NO_AUX_EFFECT_ID) {
      audioTrack.setAuxEffectSendLevel(auxEffectInfo.sendLevel);
    }
  }
  this.auxEffectInfo = auxEffectInfo;
}
@Override
public void enableTunnelingV21(int tunnelingAudioSessionId) {
  Assertions.checkState(Util.SDK_INT >= 21);
  boolean alreadyConfigured = tunneling && audioSessionId == tunnelingAudioSessionId;
  if (alreadyConfigured) {
    return;
  }
  tunneling = true;
  audioSessionId = tunnelingAudioSessionId;
  // Tunneling and the session id affect track creation, so rebuild the track.
  flush();
}
@Override
public void disableTunneling() {
  if (!tunneling) {
    return;
  }
  tunneling = false;
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  flush();
}
@Override
public void setVolume(float volume) {
  if (this.volume == volume) {
    return;
  }
  this.volume = volume;
  setVolumeInternal();
}
private void setVolumeInternal() {
  if (!isAudioTrackInitialized()) {
    // No track yet; the volume is applied when the track is initialized.
    return;
  }
  if (Util.SDK_INT >= 21) {
    setVolumeInternalV21(audioTrack, volume);
  } else {
    setVolumeInternalV3(audioTrack, volume);
  }
}
@Override
public void pause() {
  playing = false;
  // Only pause the platform track if the position tracker agrees it should be paused.
  boolean shouldPauseTrack = isAudioTrackInitialized() && audioTrackPositionTracker.pause();
  if (shouldPauseTrack) {
    audioTrack.pause();
  }
}
/**
 * Resets sink state and releases the current AudioTrack asynchronously. A new track is created
 * on the next call that needs one.
 */
@Override
public void flush() {
  if (isAudioTrackInitialized()) {
    resetSinkStateForFlush();
    if (audioTrackPositionTracker.isPlaying()) {
      audioTrack.pause();
    }
    if (isOffloadedPlayback(audioTrack)) {
      // The callback must have been registered when the offloaded track was set up.
      Assertions.checkNotNull(offloadStreamEventCallbackV29).unregister(audioTrack);
    }
    // AudioTrack.release can take some time, so we call it on a background thread.
    final AudioTrack toRelease = audioTrack;
    audioTrack = null;
    if (pendingConfiguration != null) {
      // Promote the pending configuration so the next track uses it.
      configuration = pendingConfiguration;
      pendingConfiguration = null;
    }
    audioTrackPositionTracker.reset();
    releasingConditionVariable.close();
    new Thread("ExoPlayer:AudioTrackReleaseThread") {
      @Override
      public void run() {
        try {
          toRelease.flush();
          toRelease.release();
        } finally {
          // Signal that the release has completed.
          releasingConditionVariable.open();
        }
      }
    }.start();
  }
}
/**
 * Flushes the sink while keeping the AudioTrack, avoiding the cost of release/re-creation. Falls
 * back to a full {@link #flush()} on SDK &lt; 25, where AudioTrack.flush is unreliable.
 */
@Override
public void experimentalFlushWithoutAudioTrackRelease() {
  // Prior to SDK 25, AudioTrack flush does not work as intended, and therefore it must be
  // released and reinitialized. (Internal reference: b/143500232)
  if (Util.SDK_INT < 25) {
    flush();
    return;
  }
  if (!isAudioTrackInitialized()) {
    return;
  }
  resetSinkStateForFlush();
  if (audioTrackPositionTracker.isPlaying()) {
    audioTrack.pause();
  }
  audioTrack.flush();
  audioTrackPositionTracker.reset();
  // Re-attach the flushed track to the position tracker, mirroring initialization.
  audioTrackPositionTracker.setAudioTrack(
      audioTrack,
      /* isPassthrough= */ configuration.outputMode == OUTPUT_MODE_PASSTHROUGH,
      configuration.outputEncoding,
      configuration.outputPcmFrameSize,
      configuration.bufferSize);
  startMediaTimeUsNeedsInit = true;
}
// Fully resets the sink: releases tracks, resets every available audio processor, and clears
// session/offload state.
@Override
public void reset() {
  flush();
  releaseKeepSessionIdAudioTrack();
  for (AudioProcessor processor : toIntPcmAvailableAudioProcessors) {
    processor.reset();
  }
  for (AudioProcessor processor : toFloatPcmAvailableAudioProcessors) {
    processor.reset();
  }
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  playing = false;
  offloadDisabledUntilNextConfiguration = false;
}
// Internal methods.
/** Resets byte/frame counters, buffers, and media-position bookkeeping ahead of a flush. */
private void resetSinkStateForFlush() {
  submittedPcmBytes = 0;
  submittedEncodedFrames = 0;
  writtenPcmBytes = 0;
  writtenEncodedFrames = 0;
  isWaitingForOffloadEndOfStreamHandled = false;
  framesPerEncodedSample = 0;
  // Restart media-position tracking from zero with the currently active parameters.
  mediaPositionParameters =
      new MediaPositionParameters(
          getAudioProcessorPlaybackParameters(),
          getSkipSilenceEnabled(),
          /* mediaTimeUs= */ 0,
          /* audioTrackPositionUs= */ 0);
  startMediaTimeUs = 0;
  afterDrainParameters = null;
  mediaPositionParametersCheckpoints.clear();
  inputBuffer = null;
  inputBufferAccessUnitCount = 0;
  outputBuffer = null;
  stoppedAudioTrack = false;
  handledEndOfStream = false;
  drainingAudioProcessorIndex = C.INDEX_UNSET;
  avSyncHeader = null;
  bytesUntilNextAvSync = 0;
  trimmingAudioProcessor.resetTrimmedFrameCount();
  flushAudioProcessors();
}
/** Releases {@link #keepSessionIdAudioTrack} asynchronously, if it is non-{@code null}. */
private void releaseKeepSessionIdAudioTrack() {
  final AudioTrack toRelease = keepSessionIdAudioTrack;
  if (toRelease == null) {
    return;
  }
  keepSessionIdAudioTrack = null;
  // AudioTrack.release can take some time, so we call it on a background thread.
  new Thread() {
    @Override
    public void run() {
      toRelease.release();
    }
  }.start();
}
/**
 * Applies speed/pitch directly on the platform AudioTrack (API 23+), then reads back the
 * effective values, which the platform may have adjusted, before storing them.
 */
@RequiresApi(23)
private void setAudioTrackPlaybackParametersV23(PlaybackParameters audioTrackPlaybackParameters) {
  if (isAudioTrackInitialized()) {
    PlaybackParams playbackParams =
        new PlaybackParams()
            .allowDefaults()
            .setSpeed(audioTrackPlaybackParameters.speed)
            .setPitch(audioTrackPlaybackParameters.pitch)
            .setAudioFallbackMode(PlaybackParams.AUDIO_FALLBACK_MODE_FAIL);
    try {
      audioTrack.setPlaybackParams(playbackParams);
    } catch (IllegalArgumentException e) {
      // Continue with whatever params the track currently has.
      Log.w(TAG, "Failed to set playback params", e);
    }
    // Update the speed using the actual effective speed from the audio track.
    audioTrackPlaybackParameters =
        new PlaybackParameters(
            audioTrack.getPlaybackParams().getSpeed(), audioTrack.getPlaybackParams().getPitch());
    audioTrackPositionTracker.setAudioTrackPlaybackSpeed(audioTrackPlaybackParameters.speed);
  }
  this.audioTrackPlaybackParameters = audioTrackPlaybackParameters;
}
/**
 * Stores new audio-processor playback parameters and skip-silence flag. If a track is active the
 * change is deferred (via {@link #afterDrainParameters}) until the processors have drained, so
 * that the exact position from which the new parameters apply can be determined.
 */
private void setAudioProcessorPlaybackParametersAndSkipSilence(
    PlaybackParameters playbackParameters, boolean skipSilence) {
  MediaPositionParameters currentMediaPositionParameters = getMediaPositionParameters();
  if (!playbackParameters.equals(currentMediaPositionParameters.playbackParameters)
      || skipSilence != currentMediaPositionParameters.skipSilence) {
    MediaPositionParameters mediaPositionParameters =
        new MediaPositionParameters(
            playbackParameters,
            skipSilence,
            /* mediaTimeUs= */ C.TIME_UNSET,
            /* audioTrackPositionUs= */ C.TIME_UNSET);
    if (isAudioTrackInitialized()) {
      // Drain the audio processors so we can determine the frame position at which the new
      // parameters apply.
      this.afterDrainParameters = mediaPositionParameters;
    } else {
      // Update the audio processor chain parameters now. They will be applied to the audio
      // processors during initialization.
      this.mediaPositionParameters = mediaPositionParameters;
    }
  }
}
private PlaybackParameters getAudioProcessorPlaybackParameters() {
  MediaPositionParameters parameters = getMediaPositionParameters();
  return parameters.playbackParameters;
}
private MediaPositionParameters getMediaPositionParameters() {
  // Prefer parameters that have been set but not yet applied: first any pending-after-drain
  // parameters, then the most recent checkpoint, and finally the currently applied parameters.
  if (afterDrainParameters != null) {
    return afterDrainParameters;
  }
  if (!mediaPositionParametersCheckpoints.isEmpty()) {
    return mediaPositionParametersCheckpoints.getLast();
  }
  return mediaPositionParameters;
}
/**
 * Pushes the current playback parameters and skip-silence flag into the processor chain and
 * records a checkpoint mapping media time to audio track position from which they apply.
 */
private void applyAudioProcessorPlaybackParametersAndSkipSilence(long presentationTimeUs) {
  PlaybackParameters playbackParameters =
      configuration.canApplyPlaybackParameters
          ? audioProcessorChain.applyPlaybackParameters(getAudioProcessorPlaybackParameters())
          : PlaybackParameters.DEFAULT;
  boolean skipSilenceEnabled =
      configuration.canApplyPlaybackParameters
          ? audioProcessorChain.applySkipSilenceEnabled(getSkipSilenceEnabled())
          : DEFAULT_SKIP_SILENCE;
  mediaPositionParametersCheckpoints.add(
      new MediaPositionParameters(
          playbackParameters,
          skipSilenceEnabled,
          /* mediaTimeUs= */ max(0, presentationTimeUs),
          /* audioTrackPositionUs= */ configuration.framesToDurationUs(getWrittenFrames())));
  setupAudioProcessors();
  if (listener != null) {
    listener.onSkipSilenceEnabledChanged(skipSilenceEnabled);
  }
}
/**
 * Applies and updates media position parameters.
 *
 * @param positionUs The current audio track position, in microseconds.
 * @return The current media time, in microseconds.
 */
private long applyMediaPositionParameters(long positionUs) {
  while (!mediaPositionParametersCheckpoints.isEmpty()
      && positionUs >= mediaPositionParametersCheckpoints.getFirst().audioTrackPositionUs) {
    // We are playing (or about to play) media with the new parameters, so update them.
    mediaPositionParameters = mediaPositionParametersCheckpoints.remove();
  }
  long playoutDurationSinceLastCheckpoint =
      positionUs - mediaPositionParameters.audioTrackPositionUs;
  if (!mediaPositionParameters.playbackParameters.equals(PlaybackParameters.DEFAULT)) {
    if (mediaPositionParametersCheckpoints.isEmpty()) {
      // The active parameters are the latest set; the processor chain can give an exact mapping.
      playoutDurationSinceLastCheckpoint =
          audioProcessorChain.getMediaDuration(playoutDurationSinceLastCheckpoint);
    } else {
      // Playing data at a previous playback speed, so fall back to multiplying by the speed.
      playoutDurationSinceLastCheckpoint =
          Util.getMediaDurationForPlayoutDuration(
              playoutDurationSinceLastCheckpoint,
              mediaPositionParameters.playbackParameters.speed);
    }
  }
  return mediaPositionParameters.mediaTimeUs + playoutDurationSinceLastCheckpoint;
}
private long applySkipping(long positionUs) {
  // Add back the duration of frames the processor chain skipped (e.g. skipped silence).
  long skippedDurationUs =
      configuration.framesToDurationUs(audioProcessorChain.getSkippedOutputFrameCount());
  return positionUs + skippedDurationUs;
}
/** Returns whether an {@link AudioTrack} currently exists (built and not yet flushed away). */
private boolean isAudioTrackInitialized() {
  return audioTrack != null;
}
// Returns the number of input frames submitted to the sink so far.
private long getSubmittedFrames() {
  if (configuration.outputMode == OUTPUT_MODE_PCM) {
    return submittedPcmBytes / configuration.inputPcmFrameSize;
  }
  return submittedEncodedFrames;
}
// Returns the number of output frames written to the AudioTrack so far.
private long getWrittenFrames() {
  if (configuration.outputMode == OUTPUT_MODE_PCM) {
    return writtenPcmBytes / configuration.outputPcmFrameSize;
  }
  return writtenEncodedFrames;
}
private static boolean isPassthroughPlaybackSupported(
    Format format, @Nullable AudioCapabilities audioCapabilities) {
  Pair<Integer, Integer> encodingAndChannelConfig =
      getEncodingAndChannelConfigForPassthrough(format, audioCapabilities);
  return encodingAndChannelConfig != null;
}
/**
 * Returns the encoding and channel config to use when configuring an {@link AudioTrack} in
 * passthrough mode for the specified {@link Format}. Returns {@code null} if passthrough of the
 * format is unsupported.
 *
 * @param format The {@link Format}.
 * @param audioCapabilities The device audio capabilities.
 * @return The encoding and channel config to use, or {@code null} if passthrough of the format is
 *     unsupported.
 */
@Nullable
private static Pair<Integer, Integer> getEncodingAndChannelConfigForPassthrough(
    Format format, @Nullable AudioCapabilities audioCapabilities) {
  if (audioCapabilities == null) {
    return null;
  }
  @C.Encoding
  int encoding =
      MimeTypes.getEncoding(Assertions.checkNotNull(format.sampleMimeType), format.codecs);
  // Only allow encodings that are known to work for passthrough with the implementation in this
  // class. This avoids trying to use passthrough with an encoding where the device/app reports
  // it's capable but it is untested or known to be broken (for example AAC-LC).
  switch (encoding) {
    case C.ENCODING_AC3:
    case C.ENCODING_E_AC3:
    case C.ENCODING_E_AC3_JOC:
    case C.ENCODING_AC4:
    case C.ENCODING_DTS:
    case C.ENCODING_DTS_HD:
    case C.ENCODING_DOLBY_TRUEHD:
      break;
    default:
      return null;
  }
  // E-AC3 JOC is object based, so any channel count specified in the format is arbitrary. Use 6,
  // since the E-AC3 compatible part of the stream is 5.1.
  int channelCount = encoding == C.ENCODING_E_AC3_JOC ? 6 : format.channelCount;
  if (channelCount > audioCapabilities.getMaxChannelCount()) {
    return null;
  }
  int channelConfig = getChannelConfigForPassthrough(channelCount);
  if (channelConfig == AudioFormat.CHANNEL_INVALID) {
    return null;
  }
  if (audioCapabilities.supportsEncoding(encoding)) {
    return Pair.create(encoding, channelConfig);
  }
  if (encoding == C.ENCODING_E_AC3_JOC && audioCapabilities.supportsEncoding(C.ENCODING_E_AC3)) {
    // E-AC3 receivers support E-AC3 JOC streams (but decode in 2-D rather than 3-D).
    return Pair.create(C.ENCODING_E_AC3, channelConfig);
  }
  return null;
}
private static int getChannelConfigForPassthrough(int channelCount) {
  if (Util.SDK_INT <= 28) {
    // In passthrough mode the channel count used to configure the audio track doesn't affect how
    // the stream is handled, except that some devices do overly-strict channel configuration
    // checks. Therefore we override the channel count so that a known-working channel
    // configuration is chosen in all cases. See [Internal: b/29116190].
    switch (channelCount) {
      case 7:
        channelCount = 8;
        break;
      case 3:
      case 4:
      case 5:
        channelCount = 6;
        break;
      default:
        break;
    }
  }
  // Workaround for Nexus Player not reporting support for mono passthrough. See
  // [Internal: b/34268671].
  if (Util.SDK_INT <= 26 && "fugu".equals(Util.DEVICE) && channelCount == 1) {
    channelCount = 2;
  }
  return Util.getAudioTrackChannelConfig(channelCount);
}
// Returns whether the platform (API 29+) can play the given format in audio offload mode with
// the given attributes, including a gapless-support check for streams with encoder delay/padding.
private static boolean isOffloadedPlaybackSupported(
    Format format, AudioAttributes audioAttributes) {
  if (Util.SDK_INT < 29) {
    return false;
  }
  @C.Encoding
  int encoding =
      MimeTypes.getEncoding(Assertions.checkNotNull(format.sampleMimeType), format.codecs);
  if (encoding == C.ENCODING_INVALID) {
    return false;
  }
  int channelConfig = Util.getAudioTrackChannelConfig(format.channelCount);
  if (channelConfig == AudioFormat.CHANNEL_INVALID) {
    return false;
  }
  AudioFormat audioFormat = getAudioFormat(format.sampleRate, channelConfig, encoding);
  if (!AudioManager.isOffloadedPlaybackSupported(
      audioFormat, audioAttributes.getAudioAttributesV21())) {
    return false;
  }
  if (format.encoderDelay != 0 || format.encoderPadding != 0) {
    // Gapless content: also require gapless offload support.
    return isOffloadedGaplessPlaybackSupported();
  }
  return true;
}
private static boolean isOffloadedPlayback(AudioTrack audioTrack) {
  // AudioTrack.isOffloadedPlayback is only available from API 29.
  if (Util.SDK_INT < 29) {
    return false;
  }
  return audioTrack.isOffloadedPlayback();
}
/**
 * Returns whether the device supports gapless in offload playback.
 *
 * <p>Gapless offload is not supported by all devices and there is no API to query its support. As
 * a result this detection is currently based on manual testing.
 */
// TODO(internal b/158191844): Add an SDK API to query offload gapless support.
private static boolean isOffloadedGaplessPlaybackSupported() {
  if (Util.SDK_INT < 30) {
    return false;
  }
  return Util.MODEL.startsWith("Pixel");
}
// Creates a minimal AudioTrack whose only purpose is to keep the given audio session alive.
private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
  // Equal to private AudioTrack.MIN_SAMPLE_RATE.
  int minSampleRate = 4000;
  // Use a two byte buffer, as it is not actually used for playback.
  int twoByteBufferSize = 2;
  return new AudioTrack(
      C.STREAM_TYPE_DEFAULT,
      minSampleRate,
      AudioFormat.CHANNEL_OUT_MONO,
      C.ENCODING_PCM_16BIT,
      twoByteBufferSize,
      AudioTrack.MODE_STATIC,
      audioSessionId);
}
/**
 * Returns the maximum byte rate for the given encoded (non-PCM) encoding.
 *
 * @throws IllegalArgumentException If {@code encoding} is a PCM encoding or is otherwise not an
 *     encoded format handled by this method.
 */
private static int getMaximumEncodedRateBytesPerSecond(@C.Encoding int encoding) {
  switch (encoding) {
    case C.ENCODING_MP3:
      return MpegAudioUtil.MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_LC:
      return AacUtil.AAC_LC_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_HE_V1:
      return AacUtil.AAC_HE_V1_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_HE_V2:
      return AacUtil.AAC_HE_V2_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_XHE:
      return AacUtil.AAC_XHE_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_ELD:
      return AacUtil.AAC_ELD_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AC3:
      return Ac3Util.AC3_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_E_AC3:
    case C.ENCODING_E_AC3_JOC:
      return Ac3Util.E_AC3_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AC4:
      return Ac4Util.MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_DTS:
      return DtsUtil.DTS_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_DTS_HD:
      return DtsUtil.DTS_HD_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_DOLBY_TRUEHD:
      return Ac3Util.TRUEHD_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_PCM_16BIT:
    case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
    case C.ENCODING_PCM_24BIT:
    case C.ENCODING_PCM_32BIT:
    case C.ENCODING_PCM_8BIT:
    case C.ENCODING_PCM_FLOAT:
    case C.ENCODING_AAC_ER_BSAC:
    case C.ENCODING_INVALID:
    case Format.NO_VALUE:
    default:
      // Include the encoding to ease diagnosis; previously this threw without a message.
      throw new IllegalArgumentException("Unexpected audio encoding: " + encoding);
  }
}
/**
 * Returns the number of audio frames per access unit for {@code encoding}, parsing frame headers
 * from {@code buffer} where the encoding requires it.
 *
 * @throws IllegalArgumentException If an MP3 frame header cannot be parsed from the buffer.
 * @throws IllegalStateException If {@code encoding} is not an encoded (non-PCM) format handled
 *     here.
 */
private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) {
  switch (encoding) {
    case C.ENCODING_MP3:
      int headerDataInBigEndian = Util.getBigEndianInt(buffer, buffer.position());
      int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(headerDataInBigEndian);
      if (frameCount == C.LENGTH_UNSET) {
        // Include the header bytes to ease diagnosis; previously this threw without a message.
        throw new IllegalArgumentException(
            "Unexpected MP3 header: 0x" + Integer.toHexString(headerDataInBigEndian));
      }
      return frameCount;
    case C.ENCODING_AAC_LC:
      return AacUtil.AAC_LC_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_AAC_HE_V1:
    case C.ENCODING_AAC_HE_V2:
      return AacUtil.AAC_HE_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_AAC_XHE:
      return AacUtil.AAC_XHE_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_AAC_ELD:
      return AacUtil.AAC_LD_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_DTS:
    case C.ENCODING_DTS_HD:
      return DtsUtil.parseDtsAudioSampleCount(buffer);
    case C.ENCODING_AC3:
    case C.ENCODING_E_AC3:
    case C.ENCODING_E_AC3_JOC:
      return Ac3Util.parseAc3SyncframeAudioSampleCount(buffer);
    case C.ENCODING_AC4:
      return Ac4Util.parseAc4SyncframeAudioSampleCount(buffer);
    case C.ENCODING_DOLBY_TRUEHD:
      // TrueHD samples don't all start with a syncframe header; 0 signals "unknown" to callers.
      int syncframeOffset = Ac3Util.findTrueHdSyncframeOffset(buffer);
      return syncframeOffset == C.INDEX_UNSET
          ? 0
          : (Ac3Util.parseTrueHdSyncframeAudioSampleCount(buffer, syncframeOffset)
              * Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT);
    case C.ENCODING_PCM_16BIT:
    case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
    case C.ENCODING_PCM_24BIT:
    case C.ENCODING_PCM_32BIT:
    case C.ENCODING_PCM_8BIT:
    case C.ENCODING_PCM_FLOAT:
    case C.ENCODING_AAC_ER_BSAC:
    case C.ENCODING_INVALID:
    case Format.NO_VALUE:
    default:
      throw new IllegalStateException("Unexpected audio encoding: " + encoding);
  }
}
// Non-blocking AudioTrack write using the ByteBuffer overload introduced in API 21.
@RequiresApi(21)
private static int writeNonBlockingV21(AudioTrack track, ByteBuffer data, int sizeBytes) {
  return track.write(data, sizeBytes, AudioTrack.WRITE_NON_BLOCKING);
}
/**
 * Writes to the track in tunneling mode. On API 26+ the platform attaches the timestamp
 * natively; on API 21-25 the data is manually encapsulated with an AV sync header.
 *
 * @return The number of payload bytes written, or a negative AudioTrack error code.
 */
@RequiresApi(21)
private int writeNonBlockingWithAvSyncV21(
    AudioTrack audioTrack, ByteBuffer buffer, int size, long presentationTimeUs) {
  if (Util.SDK_INT >= 26) {
    // The underlying platform AudioTrack writes AV sync headers directly.
    return audioTrack.write(
        buffer, size, AudioTrack.WRITE_NON_BLOCKING, presentationTimeUs * 1000);
  }
  if (avSyncHeader == null) {
    // 16-byte header: 4-byte magic, 4-byte size, 8-byte timestamp (ns), big-endian.
    avSyncHeader = ByteBuffer.allocate(16);
    avSyncHeader.order(ByteOrder.BIG_ENDIAN);
    avSyncHeader.putInt(0x55550001);
  }
  if (bytesUntilNextAvSync == 0) {
    // Start a new encapsulated access unit: record its size and timestamp in the header.
    avSyncHeader.putInt(4, size);
    avSyncHeader.putLong(8, presentationTimeUs * 1000);
    avSyncHeader.position(0);
    bytesUntilNextAvSync = size;
  }
  int avSyncHeaderBytesRemaining = avSyncHeader.remaining();
  if (avSyncHeaderBytesRemaining > 0) {
    int result =
        audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, AudioTrack.WRITE_NON_BLOCKING);
    if (result < 0) {
      bytesUntilNextAvSync = 0;
      return result;
    }
    if (result < avSyncHeaderBytesRemaining) {
      // Header only partially written; report no payload progress yet.
      return 0;
    }
  }
  int result = writeNonBlockingV21(audioTrack, buffer, size);
  if (result < 0) {
    bytesUntilNextAvSync = 0;
    return result;
  }
  bytesUntilNextAvSync -= result;
  return result;
}
// API 21+ exposes a single combined volume setter on AudioTrack.
@RequiresApi(21)
private static void setVolumeInternalV21(AudioTrack track, float volume) {
  track.setVolume(volume);
}
// Pre-21 only a per-channel stereo setter exists; apply the same gain to both channels.
private static void setVolumeInternalV3(AudioTrack track, float volume) {
  track.setStereoVolume(volume, volume);
}
// Signals end of stream to the AudioTrack (at most once per stream) so queued data plays out.
private void playPendingData() {
  if (stoppedAudioTrack) {
    return;
  }
  stoppedAudioTrack = true;
  audioTrackPositionTracker.handleEndOfStream(getWrittenFrames());
  audioTrack.stop();
  bytesUntilNextAvSync = 0;
}
/**
 * Receives AudioTrack stream events (API 29+) during offload playback and forwards
 * buffer-emptying signals to the sink listener.
 */
@RequiresApi(29)
private final class StreamEventCallbackV29 extends AudioTrack.StreamEventCallback {
  // Bound to the thread this callback is constructed on (the playback thread; see
  // registerStreamEventCallbackV29), so events are delivered there.
  private final Handler handler;
  public StreamEventCallbackV29() {
    handler = new Handler();
  }
  @Override
  public void onDataRequest(AudioTrack track, int size) {
    Assertions.checkState(track == audioTrack);
    if (listener != null && playing) {
      // Do not signal that the buffer is emptying if not playing as it is a transient state.
      listener.onOffloadBufferEmptying();
    }
  }
  @Override
  public void onTearDown(@NonNull AudioTrack track) {
    Assertions.checkState(track == audioTrack);
    if (listener != null && playing) {
      // The audio track was destroyed while in use. Thus a new AudioTrack needs to be created
      // and its buffer filled, which will be done on the next handleBuffer call.
      // Request this call explicitly in case ExoPlayer is sleeping waiting for a data request.
      listener.onOffloadBufferEmptying();
    }
  }
  public void register(AudioTrack audioTrack) {
    audioTrack.registerStreamEventCallback(handler::post, this);
  }
  public void unregister(AudioTrack audioTrack) {
    audioTrack.unregisterStreamEventCallback(this);
    // Drop any notifications that were posted but not yet processed.
    handler.removeCallbacksAndMessages(/* token= */ null);
  }
}
/** Stores parameters used to calculate the current media position. */
private static final class MediaPositionParameters {
  /** The playback parameters. */
  public final PlaybackParameters playbackParameters;
  /** Whether to skip silences. */
  public final boolean skipSilence;
  /** The media time from which the playback parameters apply, in microseconds. */
  public final long mediaTimeUs;
  /** The audio track position from which the playback parameters apply, in microseconds. */
  public final long audioTrackPositionUs;

  // Immutable checkpoint; instances are queued in mediaPositionParametersCheckpoints and only
  // created by the enclosing sink, hence the private constructor.
  private MediaPositionParameters(
      PlaybackParameters playbackParameters,
      boolean skipSilence,
      long mediaTimeUs,
      long audioTrackPositionUs) {
    this.playbackParameters = playbackParameters;
    this.skipSilence = skipSilence;
    this.mediaTimeUs = mediaTimeUs;
    this.audioTrackPositionUs = audioTrackPositionUs;
  }
}
/** Builds an {@link AudioFormat} with the given sample rate, channel mask and encoding. */
@RequiresApi(21)
private static AudioFormat getAudioFormat(int sampleRate, int channelConfig, int encoding) {
  AudioFormat.Builder formatBuilder = new AudioFormat.Builder();
  formatBuilder.setSampleRate(sampleRate);
  formatBuilder.setChannelMask(channelConfig);
  formatBuilder.setEncoding(encoding);
  return formatBuilder.build();
}
/** Receives position/underrun events from {@link AudioTrackPositionTracker}. */
private final class PositionTrackerListener implements AudioTrackPositionTracker.Listener {

  @Override
  public void onPositionFramesMismatch(
      long audioTimestampPositionFrames,
      long audioTimestampSystemTimeUs,
      long systemTimeUs,
      long playbackPositionUs) {
    reportSpuriousTimestamp(
        "frame position mismatch",
        audioTimestampPositionFrames,
        audioTimestampSystemTimeUs,
        systemTimeUs,
        playbackPositionUs);
  }

  @Override
  public void onSystemTimeUsMismatch(
      long audioTimestampPositionFrames,
      long audioTimestampSystemTimeUs,
      long systemTimeUs,
      long playbackPositionUs) {
    reportSpuriousTimestamp(
        "system clock mismatch",
        audioTimestampPositionFrames,
        audioTimestampSystemTimeUs,
        systemTimeUs,
        playbackPositionUs);
  }

  // Shared reporting path for both mismatch callbacks: throws if failOnSpuriousAudioTimestamp
  // is set (testing/debugging), otherwise logs a warning. The message text matches the original
  // per-callback messages exactly.
  private void reportSpuriousTimestamp(
      String cause,
      long audioTimestampPositionFrames,
      long audioTimestampSystemTimeUs,
      long systemTimeUs,
      long playbackPositionUs) {
    String message =
        "Spurious audio timestamp ("
            + cause
            + "): "
            + audioTimestampPositionFrames
            + ", "
            + audioTimestampSystemTimeUs
            + ", "
            + systemTimeUs
            + ", "
            + playbackPositionUs
            + ", "
            + getSubmittedFrames()
            + ", "
            + getWrittenFrames();
    if (failOnSpuriousAudioTimestamp) {
      throw new InvalidAudioTrackTimestampException(message);
    }
    Log.w(TAG, message);
  }

  @Override
  public void onInvalidLatency(long latencyUs) {
    Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs);
  }

  @Override
  public void onPositionAdvancing(long playoutStartSystemTimeMs) {
    if (listener != null) {
      listener.onPositionAdvancing(playoutStartSystemTimeMs);
    }
  }

  @Override
  public void onUnderrun(int bufferSize, long bufferSizeMs) {
    if (listener != null) {
      long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
      listener.onUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
    }
  }
}
/** Stores configuration relating to the audio format. */
private static final class Configuration {

  /** The input {@link Format} this configuration was derived from. */
  public final Format inputFormat;
  /** Bytes per input PCM frame, or C.LENGTH_UNSET for encoded (non-PCM) input. */
  public final int inputPcmFrameSize;
  /** Whether output is PCM, offload or passthrough. */
  @OutputMode public final int outputMode;
  /** Bytes per output PCM frame, or C.LENGTH_UNSET for encoded output. */
  public final int outputPcmFrameSize;
  public final int outputSampleRate;
  /** AudioTrack channel configuration mask for the output. */
  public final int outputChannelConfig;
  @C.Encoding public final int outputEncoding;
  /** AudioTrack buffer size in bytes; computed in the constructor if not explicitly specified. */
  public final int bufferSize;
  public final boolean canApplyPlaybackParameters;
  public final AudioProcessor[] availableAudioProcessors;

  public Configuration(
      Format inputFormat,
      int inputPcmFrameSize,
      @OutputMode int outputMode,
      int outputPcmFrameSize,
      int outputSampleRate,
      int outputChannelConfig,
      int outputEncoding,
      int specifiedBufferSize,
      boolean enableAudioTrackPlaybackParams,
      boolean canApplyPlaybackParameters,
      AudioProcessor[] availableAudioProcessors) {
    this.inputFormat = inputFormat;
    this.inputPcmFrameSize = inputPcmFrameSize;
    this.outputMode = outputMode;
    this.outputPcmFrameSize = outputPcmFrameSize;
    this.outputSampleRate = outputSampleRate;
    this.outputChannelConfig = outputChannelConfig;
    this.outputEncoding = outputEncoding;
    this.canApplyPlaybackParameters = canApplyPlaybackParameters;
    this.availableAudioProcessors = availableAudioProcessors;
    // Call computeBufferSize() last as it depends on the other configuration values.
    this.bufferSize = computeBufferSize(specifiedBufferSize, enableAudioTrackPlaybackParams);
  }

  /** Returns if the configurations are sufficiently compatible to reuse the audio track. */
  public boolean canReuseAudioTrack(Configuration audioTrackConfiguration) {
    return audioTrackConfiguration.outputMode == outputMode
        && audioTrackConfiguration.outputEncoding == outputEncoding
        && audioTrackConfiguration.outputSampleRate == outputSampleRate
        && audioTrackConfiguration.outputChannelConfig == outputChannelConfig
        && audioTrackConfiguration.outputPcmFrameSize == outputPcmFrameSize;
  }

  /** Converts an input-domain frame count to a duration in microseconds. */
  public long inputFramesToDurationUs(long frameCount) {
    return (frameCount * C.MICROS_PER_SECOND) / inputFormat.sampleRate;
  }

  /** Converts an output-domain frame count to a duration in microseconds. */
  public long framesToDurationUs(long frameCount) {
    return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
  }

  /** Converts a duration in microseconds to an output-domain frame count. */
  public long durationUsToFrames(long durationUs) {
    return (durationUs * outputSampleRate) / C.MICROS_PER_SECOND;
  }

  /**
   * Creates and verifies an {@link AudioTrack} for this configuration.
   *
   * @throws InitializationException If creation throws, or the created track does not reach
   *     {@link AudioTrack#STATE_INITIALIZED}. Initialization failures in offload mode are marked
   *     recoverable so the caller can retry without offload.
   */
  public AudioTrack buildAudioTrack(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId)
      throws InitializationException {
    AudioTrack audioTrack;
    try {
      audioTrack = createAudioTrack(tunneling, audioAttributes, audioSessionId);
    } catch (UnsupportedOperationException | IllegalArgumentException e) {
      throw new InitializationException(
          AudioTrack.STATE_UNINITIALIZED,
          outputSampleRate,
          outputChannelConfig,
          bufferSize,
          /* isRecoverable= */ outputModeIsOffload(),
          e);
    }
    int state = audioTrack.getState();
    if (state != AudioTrack.STATE_INITIALIZED) {
      try {
        audioTrack.release();
      } catch (Exception e) {
        // The track has already failed to initialize, so it wouldn't be that surprising if
        // release were to fail too. Swallow the exception.
      }
      throw new InitializationException(
          state,
          outputSampleRate,
          outputChannelConfig,
          bufferSize,
          /* isRecoverable= */ outputModeIsOffload(),
          /* audioTrackException= */ null);
    }
    return audioTrack;
  }

  // Dispatches to the most capable AudioTrack construction path for the device API level.
  private AudioTrack createAudioTrack(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) {
    if (Util.SDK_INT >= 29) {
      return createAudioTrackV29(tunneling, audioAttributes, audioSessionId);
    } else if (Util.SDK_INT >= 21) {
      return createAudioTrackV21(tunneling, audioAttributes, audioSessionId);
    } else {
      return createAudioTrackV9(audioAttributes, audioSessionId);
    }
  }

  // API 29+ path: the Builder supports requesting offloaded playback.
  @RequiresApi(29)
  private AudioTrack createAudioTrackV29(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) {
    AudioFormat audioFormat =
        getAudioFormat(outputSampleRate, outputChannelConfig, outputEncoding);
    android.media.AudioAttributes audioTrackAttributes =
        getAudioTrackAttributesV21(audioAttributes, tunneling);
    return new AudioTrack.Builder()
        .setAudioAttributes(audioTrackAttributes)
        .setAudioFormat(audioFormat)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .setBufferSizeInBytes(bufferSize)
        .setSessionId(audioSessionId)
        .setOffloadedPlayback(outputMode == OUTPUT_MODE_OFFLOAD)
        .build();
  }

  // API 21-28 path: attributes-based constructor, no offload support.
  @RequiresApi(21)
  private AudioTrack createAudioTrackV21(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) {
    return new AudioTrack(
        getAudioTrackAttributesV21(audioAttributes, tunneling),
        getAudioFormat(outputSampleRate, outputChannelConfig, outputEncoding),
        bufferSize,
        AudioTrack.MODE_STREAM,
        audioSessionId);
  }

  // Pre-API 21 path: stream-type-based constructors.
  private AudioTrack createAudioTrackV9(AudioAttributes audioAttributes, int audioSessionId) {
    int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage);
    if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
      return new AudioTrack(
          streamType,
          outputSampleRate,
          outputChannelConfig,
          outputEncoding,
          bufferSize,
          AudioTrack.MODE_STREAM);
    } else {
      // Re-attach to the same audio session.
      return new AudioTrack(
          streamType,
          outputSampleRate,
          outputChannelConfig,
          outputEncoding,
          bufferSize,
          AudioTrack.MODE_STREAM,
          audioSessionId);
    }
  }

  // Returns the explicit size if given, otherwise a default sized for the output mode.
  private int computeBufferSize(
      int specifiedBufferSize, boolean enableAudioTrackPlaybackParameters) {
    if (specifiedBufferSize != 0) {
      return specifiedBufferSize;
    }
    switch (outputMode) {
      case OUTPUT_MODE_PCM:
        // Size for the maximum speed if AudioTrack playback params are enabled, since speed-up
        // drains the buffer faster.
        return getPcmDefaultBufferSize(
            enableAudioTrackPlaybackParameters ? MAX_PLAYBACK_SPEED : DEFAULT_PLAYBACK_SPEED);
      case OUTPUT_MODE_OFFLOAD:
        return getEncodedDefaultBufferSize(OFFLOAD_BUFFER_DURATION_US);
      case OUTPUT_MODE_PASSTHROUGH:
        return getEncodedDefaultBufferSize(PASSTHROUGH_BUFFER_DURATION_US);
      default:
        throw new IllegalStateException();
    }
  }

  // Buffer size for encoded output, based on the maximum byte rate of the encoding.
  private int getEncodedDefaultBufferSize(long bufferDurationUs) {
    int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding);
    if (outputEncoding == C.ENCODING_AC3) {
      rate *= AC3_BUFFER_MULTIPLICATION_FACTOR;
    }
    return (int) (bufferDurationUs * rate / C.MICROS_PER_SECOND);
  }

  // Buffer size for PCM output: a multiple of the platform minimum, constrained to
  // [MIN_BUFFER_DURATION_US, MAX_BUFFER_DURATION_US], then scaled for the playback speed.
  private int getPcmDefaultBufferSize(float maxAudioTrackPlaybackSpeed) {
    int minBufferSize =
        AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding);
    Assertions.checkState(minBufferSize != AudioTrack.ERROR_BAD_VALUE);
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
    int maxAppBufferSize =
        max(minBufferSize, (int) durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
    int bufferSize =
        Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
    if (maxAudioTrackPlaybackSpeed != 1f) {
      // Maintain the buffer duration by scaling the size accordingly.
      bufferSize = Math.round(bufferSize * maxAudioTrackPlaybackSpeed);
    }
    return bufferSize;
  }

  @RequiresApi(21)
  private static android.media.AudioAttributes getAudioTrackAttributesV21(
      AudioAttributes audioAttributes, boolean tunneling) {
    if (tunneling) {
      return getAudioTrackTunnelingAttributesV21();
    } else {
      return audioAttributes.getAudioAttributesV21();
    }
  }

  // Tunneling requires the HW AV sync flag; the other attribute values are fixed for this mode.
  @RequiresApi(21)
  private static android.media.AudioAttributes getAudioTrackTunnelingAttributesV21() {
    return new android.media.AudioAttributes.Builder()
        .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE)
        .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC)
        .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
        .build();
  }

  public boolean outputModeIsOffload() {
    return outputMode == OUTPUT_MODE_OFFLOAD;
  }
}
}
| library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import static java.lang.Math.max;
import static java.lang.Math.min;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.PlaybackParams;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Pair;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Plays audio data. The implementation delegates to an {@link AudioTrack} and handles playback
* position smoothing, non-blocking writes and reconfiguration.
*
* <p>If tunneling mode is enabled, care must be taken that audio processors do not output buffers
* with a different duration than their input, and buffer processors must produce output
* corresponding to their last input immediately after that input is queued. This means that, for
* example, speed adjustment is not possible while using tunneling.
*/
public final class DefaultAudioSink implements AudioSink {
/**
 * Thrown when the audio track has provided a spurious timestamp, if {@link
 * #failOnSpuriousAudioTimestamp} is set.
 */
public static final class InvalidAudioTrackTimestampException extends RuntimeException {

  /**
   * Creates a new invalid timestamp exception with the specified message.
   *
   * <p>Private because only the sink itself raises this exception.
   *
   * @param message The detail message for this exception.
   */
  private InvalidAudioTrackTimestampException(String message) {
    super(message);
  }
}
/**
 * Provides a chain of audio processors, which are used for any user-defined processing and
 * applying playback parameters (if supported). Because applying playback parameters can skip and
 * stretch/compress audio, the sink will query the chain for information on how to transform its
 * output position to map it onto a media position, via {@link #getMediaDuration(long)} and {@link
 * #getSkippedOutputFrameCount()}.
 */
public interface AudioProcessorChain {

  /**
   * Returns the fixed chain of audio processors that will process audio. This method is called
   * once during initialization, but audio processors may change state to become active/inactive
   * during playback.
   */
  AudioProcessor[] getAudioProcessors();

  /**
   * Configures audio processors to apply the specified playback parameters immediately, returning
   * the new playback parameters, which may differ from those passed in. Only called when
   * processors have no input pending.
   *
   * @param playbackParameters The playback parameters to try to apply.
   * @return The playback parameters that were actually applied.
   */
  PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters);

  /**
   * Configures audio processors to apply whether to skip silences immediately, returning the new
   * value. Only called when processors have no input pending.
   *
   * @param skipSilenceEnabled Whether silences should be skipped in the audio stream.
   * @return The new value, which may differ from the value passed in.
   */
  boolean applySkipSilenceEnabled(boolean skipSilenceEnabled);

  /**
   * Scales the specified playout duration to take into account speedup due to audio processing,
   * returning an input media duration, in arbitrary units.
   */
  long getMediaDuration(long playoutDuration);

  /**
   * Returns the number of output audio frames skipped since the audio processors were last
   * flushed.
   */
  long getSkippedOutputFrameCount();
}
/**
 * The default audio processor chain, which applies a (possibly empty) chain of user-defined audio
 * processors followed by {@link SilenceSkippingAudioProcessor} and {@link SonicAudioProcessor}.
 */
public static class DefaultAudioProcessorChain implements AudioProcessorChain {

  private final AudioProcessor[] audioProcessors;
  private final SilenceSkippingAudioProcessor silenceSkippingAudioProcessor;
  private final SonicAudioProcessor sonicAudioProcessor;

  /**
   * Creates a new default chain of audio processors, with the user-defined {@code
   * audioProcessors} applied before silence skipping and speed adjustment processors.
   */
  public DefaultAudioProcessorChain(AudioProcessor... audioProcessors) {
    this(audioProcessors, new SilenceSkippingAudioProcessor(), new SonicAudioProcessor());
  }

  /**
   * Creates a new default chain of audio processors, with the user-defined {@code
   * audioProcessors} applied before silence skipping and speed adjustment processors.
   */
  public DefaultAudioProcessorChain(
      AudioProcessor[] audioProcessors,
      SilenceSkippingAudioProcessor silenceSkippingAudioProcessor,
      SonicAudioProcessor sonicAudioProcessor) {
    // The passed-in array may be typed as a subtype of AudioProcessor[], so copy element by
    // element into a freshly allocated AudioProcessor[] with two extra trailing slots.
    this.audioProcessors = new AudioProcessor[audioProcessors.length + 2];
    for (int i = 0; i < audioProcessors.length; i++) {
      this.audioProcessors[i] = audioProcessors[i];
    }
    this.silenceSkippingAudioProcessor = silenceSkippingAudioProcessor;
    this.sonicAudioProcessor = sonicAudioProcessor;
    // Silence skipping runs before speed adjustment, after all user-defined processors.
    this.audioProcessors[audioProcessors.length] = silenceSkippingAudioProcessor;
    this.audioProcessors[audioProcessors.length + 1] = sonicAudioProcessor;
  }

  @Override
  public AudioProcessor[] getAudioProcessors() {
    return audioProcessors;
  }

  @Override
  public PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters) {
    // Sonic reports back the speed/pitch it actually applied, which may be constrained.
    float appliedSpeed = sonicAudioProcessor.setSpeed(playbackParameters.speed);
    float appliedPitch = sonicAudioProcessor.setPitch(playbackParameters.pitch);
    return new PlaybackParameters(appliedSpeed, appliedPitch);
  }

  @Override
  public boolean applySkipSilenceEnabled(boolean skipSilenceEnabled) {
    silenceSkippingAudioProcessor.setEnabled(skipSilenceEnabled);
    return skipSilenceEnabled;
  }

  @Override
  public long getMediaDuration(long playoutDuration) {
    return sonicAudioProcessor.scaleDurationForSpeedup(playoutDuration);
  }

  @Override
  public long getSkippedOutputFrameCount() {
    return silenceSkippingAudioProcessor.getSkippedFrames();
  }
}
/** The default playback speed. */
public static final float DEFAULT_PLAYBACK_SPEED = 1f;
/** The minimum allowed playback speed. Lower values will be constrained to fall in range. */
public static final float MIN_PLAYBACK_SPEED = 0.1f;
/** The maximum allowed playback speed. Higher values will be constrained to fall in range. */
public static final float MAX_PLAYBACK_SPEED = 8f;
/** The minimum allowed pitch factor. Lower values will be constrained to fall in range. */
public static final float MIN_PITCH = 0.1f;
/** The maximum allowed pitch factor. Higher values will be constrained to fall in range. */
public static final float MAX_PITCH = 8f;
/** The default skip silence flag. */
private static final boolean DEFAULT_SKIP_SILENCE = false;

/** Output modes of the sink. One of the OUTPUT_MODE_* constants. */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({OUTPUT_MODE_PCM, OUTPUT_MODE_OFFLOAD, OUTPUT_MODE_PASSTHROUGH})
private @interface OutputMode {}

// Output is (possibly processed) PCM written by the app.
private static final int OUTPUT_MODE_PCM = 0;
// Output is encoded audio decoded by the audio hardware (offload).
private static final int OUTPUT_MODE_OFFLOAD = 1;
// Output is encoded audio passed through to an external decoder.
private static final int OUTPUT_MODE_PASSTHROUGH = 2;

/** A minimum length for the {@link AudioTrack} buffer, in microseconds. */
private static final long MIN_BUFFER_DURATION_US = 250_000;
/** A maximum length for the {@link AudioTrack} buffer, in microseconds. */
private static final long MAX_BUFFER_DURATION_US = 750_000;
/** The length for passthrough {@link AudioTrack} buffers, in microseconds. */
private static final long PASSTHROUGH_BUFFER_DURATION_US = 250_000;
/** The length for offload {@link AudioTrack} buffers, in microseconds. */
private static final long OFFLOAD_BUFFER_DURATION_US = 50_000_000;
/**
 * A multiplication factor to apply to the minimum buffer size requested by the underlying {@link
 * AudioTrack}.
 */
private static final int BUFFER_MULTIPLICATION_FACTOR = 4;
/** To avoid underruns on some devices (e.g., Broadcom 7271), scale up the AC3 buffer duration. */
private static final int AC3_BUFFER_MULTIPLICATION_FACTOR = 2;
/**
 * Native error code equivalent of {@link AudioTrack#ERROR_DEAD_OBJECT} to workaround missing
 * error code translation on some devices.
 *
 * <p>On some devices, AudioTrack native error codes are not always converted to their SDK
 * equivalent.
 *
 * <p>For example: {@link AudioTrack#write(byte[], int, int)} can return -32 instead of {@link
 * AudioTrack#ERROR_DEAD_OBJECT}.
 */
private static final int ERROR_NATIVE_DEAD_OBJECT = -32;
private static final String TAG = "AudioTrack";

/**
 * Whether to enable a workaround for an issue where an audio effect does not keep its session
 * active across releasing/initializing a new audio track, on platform builds where
 * {@link Util#SDK_INT} < 21.
 * <p>
 * The flag must be set before creating a player.
 */
public static boolean enablePreV21AudioSessionWorkaround = false;
/**
 * Whether to throw an {@link InvalidAudioTrackTimestampException} when a spurious timestamp is
 * reported from {@link AudioTrack#getTimestamp}.
 * <p>
 * The flag must be set before creating a player. Should be set to {@code true} for testing and
 * debugging purposes only.
 */
public static boolean failOnSpuriousAudioTimestamp = false;

@Nullable private final AudioCapabilities audioCapabilities;
private final AudioProcessorChain audioProcessorChain;
// Whether 32-bit float PCM output is enabled (requires API 21; gated in the constructor).
private final boolean enableFloatOutput;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final TrimmingAudioProcessor trimmingAudioProcessor;
// Processor chains for the two PCM output paths: 16-bit integer and 32-bit float.
private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
private final AudioProcessor[] toFloatPcmAvailableAudioProcessors;
private final ConditionVariable releasingConditionVariable;
private final AudioTrackPositionTracker audioTrackPositionTracker;
// Queue of media-position checkpoints created when playback parameters change.
private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints;
private final boolean enableAudioTrackPlaybackParams;
private final boolean enableOffload;
@MonotonicNonNull private StreamEventCallbackV29 offloadStreamEventCallbackV29;
@Nullable private Listener listener;
/**
 * Used to keep the audio session active on pre-V21 builds (see {@link #initializeAudioTrack()}).
 */
@Nullable private AudioTrack keepSessionIdAudioTrack;
// A configuration from configure() that cannot be applied until the current track is released.
@Nullable private Configuration pendingConfiguration;
@MonotonicNonNull private Configuration configuration;
@Nullable private AudioTrack audioTrack;
private AudioAttributes audioAttributes;
@Nullable private MediaPositionParameters afterDrainParameters;
private MediaPositionParameters mediaPositionParameters;
private PlaybackParameters audioTrackPlaybackParameters;
// 16-byte big-endian AV sync header (magic 0x55550001) used for tunneled writes; lazily created.
@Nullable private ByteBuffer avSyncHeader;
// Payload bytes still owed to the track before the next AV sync header must be written.
private int bytesUntilNextAvSync;
// Byte/frame accounting for submitted (queued to the sink) and written (passed to the track) data.
private long submittedPcmBytes;
private long submittedEncodedFrames;
private long writtenPcmBytes;
private long writtenEncodedFrames;
private int framesPerEncodedSample;
private boolean startMediaTimeUsNeedsSync;
private boolean startMediaTimeUsNeedsInit;
private long startMediaTimeUs;
private float volume;
// Processors currently active, and one output buffer slot per active processor.
private AudioProcessor[] activeAudioProcessors;
private ByteBuffer[] outputBuffers;
@Nullable private ByteBuffer inputBuffer;
private int inputBufferAccessUnitCount;
@Nullable private ByteBuffer outputBuffer;
@MonotonicNonNull private byte[] preV21OutputBuffer;
private int preV21OutputBufferOffset;
private int drainingAudioProcessorIndex;
private boolean handledEndOfStream;
private boolean stoppedAudioTrack;
private boolean playing;
private int audioSessionId;
private AuxEffectInfo auxEffectInfo;
private boolean tunneling;
private long lastFeedElapsedRealtimeMs;
private boolean offloadDisabledUntilNextConfiguration;
private boolean isWaitingForOffloadEndOfStreamHandled;
/**
 * Creates a new default audio sink.
 *
 * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
 *     default capabilities (no encoded audio passthrough support) should be assumed.
 * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
 *     output. May be empty.
 */
public DefaultAudioSink(
    @Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) {
  // Delegates with float output disabled.
  this(audioCapabilities, audioProcessors, /* enableFloatOutput= */ false);
}
/**
 * Creates a new default audio sink, optionally using float output for high resolution PCM.
 *
 * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
 *     default capabilities (no encoded audio passthrough support) should be assumed.
 * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
 *     output. May be empty.
 * @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
 *     output will be used if the input is 32-bit float, and also if the input is high resolution
 *     (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not
 *     be available when float output is in use.
 */
public DefaultAudioSink(
    @Nullable AudioCapabilities audioCapabilities,
    AudioProcessor[] audioProcessors,
    boolean enableFloatOutput) {
  // Delegates with the default processor chain and both optional features disabled.
  this(
      audioCapabilities,
      new DefaultAudioProcessorChain(audioProcessors),
      enableFloatOutput,
      /* enableAudioTrackPlaybackParams= */ false,
      /* enableOffload= */ false);
}
/**
 * Creates a new default audio sink, optionally using float output for high resolution PCM and
 * with the specified {@code audioProcessorChain}.
 *
 * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
 *     default capabilities (no encoded audio passthrough support) should be assumed.
 * @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback
 *     parameters adjustments. The instance passed in must not be reused in other sinks.
 * @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
 *     output will be used if the input is 32-bit float, and also if the input is high resolution
 *     (24-bit or 32-bit) integer PCM. Float output is supported from API level 21. Audio
 *     processing (for example, speed adjustment) will not be available when float output is in
 *     use.
 * @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
 *     android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
 * @param enableOffload Whether to enable audio offload. If an audio format can be both played
 *     with offload and encoded audio passthrough, it will be played in offload. Audio offload is
 *     supported from API level 29. Most Android devices can only support one offload {@link
 *     android.media.AudioTrack} at a time and can invalidate it at any time. Thus an app can
 *     never be guaranteed that it will be able to play in offload. Audio processing (for example,
 *     speed adjustment) will not be available when offload is in use.
 */
public DefaultAudioSink(
    @Nullable AudioCapabilities audioCapabilities,
    AudioProcessorChain audioProcessorChain,
    boolean enableFloatOutput,
    boolean enableAudioTrackPlaybackParams,
    boolean enableOffload) {
  this.audioCapabilities = audioCapabilities;
  this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
  // Each optional feature is gated on the minimum API level that supports it.
  this.enableFloatOutput = Util.SDK_INT >= 21 && enableFloatOutput;
  this.enableAudioTrackPlaybackParams = Util.SDK_INT >= 23 && enableAudioTrackPlaybackParams;
  this.enableOffload = Util.SDK_INT >= 29 && enableOffload;
  releasingConditionVariable = new ConditionVariable(true);
  audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
  channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
  trimmingAudioProcessor = new TrimmingAudioProcessor();
  // Int-PCM path: resampling, channel mapping and trimming run before the user-supplied chain.
  ArrayList<AudioProcessor> toIntPcmAudioProcessors = new ArrayList<>();
  Collections.addAll(
      toIntPcmAudioProcessors,
      new ResamplingAudioProcessor(),
      channelMappingAudioProcessor,
      trimmingAudioProcessor);
  Collections.addAll(toIntPcmAudioProcessors, audioProcessorChain.getAudioProcessors());
  toIntPcmAvailableAudioProcessors = toIntPcmAudioProcessors.toArray(new AudioProcessor[0]);
  toFloatPcmAvailableAudioProcessors = new AudioProcessor[] {new FloatResamplingAudioProcessor()};
  volume = 1f;
  audioAttributes = AudioAttributes.DEFAULT;
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  auxEffectInfo = new AuxEffectInfo(AuxEffectInfo.NO_AUX_EFFECT_ID, 0f);
  mediaPositionParameters =
      new MediaPositionParameters(
          PlaybackParameters.DEFAULT,
          DEFAULT_SKIP_SILENCE,
          /* mediaTimeUs= */ 0,
          /* audioTrackPositionUs= */ 0);
  audioTrackPlaybackParameters = PlaybackParameters.DEFAULT;
  drainingAudioProcessorIndex = C.INDEX_UNSET;
  activeAudioProcessors = new AudioProcessor[0];
  outputBuffers = new ByteBuffer[0];
  mediaPositionParametersCheckpoints = new ArrayDeque<>();
}
// AudioSink implementation.
@Override
public void setListener(Listener listener) {
  // Stored for event forwarding; callers may pass a new listener at any time.
  this.listener = listener;
}
@Override
public boolean supportsFormat(Format format) {
  // Any non-unsupported level of support counts as supported.
  return getFormatSupport(format) != SINK_FORMAT_UNSUPPORTED;
}
@Override
@SinkFormatSupport
public int getFormatSupport(Format format) {
  // Raw PCM input: supported directly when 16-bit (or float, if float output is enabled),
  // otherwise supported via transcoding to 16-bit integer PCM.
  if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) {
    if (!Util.isEncodingLinearPcm(format.pcmEncoding)) {
      Log.w(TAG, "Invalid PCM encoding: " + format.pcmEncoding);
      return SINK_FORMAT_UNSUPPORTED;
    }
    boolean directlySupported =
        format.pcmEncoding == C.ENCODING_PCM_16BIT
            || (enableFloatOutput && format.pcmEncoding == C.ENCODING_PCM_FLOAT);
    if (directlySupported) {
      return SINK_FORMAT_SUPPORTED_DIRECTLY;
    }
    // We can resample all linear PCM encodings to 16-bit integer PCM, which AudioTrack is
    // guaranteed to support.
    return SINK_FORMAT_SUPPORTED_WITH_TRANSCODING;
  }
  // Encoded input: prefer offload when enabled and available.
  if (enableOffload
      && !offloadDisabledUntilNextConfiguration
      && isOffloadedPlaybackSupported(format, audioAttributes)) {
    return SINK_FORMAT_SUPPORTED_DIRECTLY;
  }
  // Otherwise fall back to encoded passthrough if the device advertises support.
  if (isPassthroughPlaybackSupported(format, audioCapabilities)) {
    return SINK_FORMAT_SUPPORTED_DIRECTLY;
  }
  return SINK_FORMAT_UNSUPPORTED;
}
@Override
public long getCurrentPositionUs(boolean sourceEnded) {
  if (!isAudioTrackInitialized() || startMediaTimeUsNeedsInit) {
    return CURRENT_POSITION_NOT_SET;
  }
  // Clamp the tracker position so it never runs ahead of what has actually been written.
  long trackPositionUs = audioTrackPositionTracker.getCurrentPositionUs(sourceEnded);
  long writtenDurationUs = configuration.framesToDurationUs(getWrittenFrames());
  long clampedPositionUs = min(trackPositionUs, writtenDurationUs);
  // Map the track position into the media timeline, then account for skipped silence.
  return applySkipping(applyMediaPositionParameters(clampedPositionUs));
}
@Override
public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels)
    throws ConfigurationException {
  int inputPcmFrameSize;
  @Nullable AudioProcessor[] availableAudioProcessors;
  boolean canApplyPlaybackParameters;
  @OutputMode int outputMode;
  @C.Encoding int outputEncoding;
  int outputSampleRate;
  int outputChannelConfig;
  int outputPcmFrameSize;
  if (MimeTypes.AUDIO_RAW.equals(inputFormat.sampleMimeType)) {
    // PCM input: run it through the processor chain and derive the output format from the
    // chain's final output.
    Assertions.checkArgument(Util.isEncodingLinearPcm(inputFormat.pcmEncoding));
    inputPcmFrameSize = Util.getPcmFrameSize(inputFormat.pcmEncoding, inputFormat.channelCount);
    boolean useFloatOutput =
        enableFloatOutput && Util.isEncodingHighResolutionPcm(inputFormat.pcmEncoding);
    availableAudioProcessors =
        useFloatOutput ? toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors;
    // Playback parameter adjustment is only available on the int-PCM path.
    canApplyPlaybackParameters = !useFloatOutput;
    trimmingAudioProcessor.setTrimFrameCount(
        inputFormat.encoderDelay, inputFormat.encoderPadding);
    if (Util.SDK_INT < 21 && inputFormat.channelCount == 8 && outputChannels == null) {
      // AudioTrack doesn't support 8 channel output before Android L. Discard the last two (side)
      // channels to give a 6 channel stream that is supported.
      outputChannels = new int[6];
      for (int i = 0; i < outputChannels.length; i++) {
        outputChannels[i] = i;
      }
    }
    channelMappingAudioProcessor.setChannelMap(outputChannels);
    // Propagate the format through each active processor to obtain the chain's output format.
    AudioProcessor.AudioFormat outputFormat =
        new AudioProcessor.AudioFormat(
            inputFormat.sampleRate, inputFormat.channelCount, inputFormat.pcmEncoding);
    for (AudioProcessor audioProcessor : availableAudioProcessors) {
      try {
        AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat);
        if (audioProcessor.isActive()) {
          outputFormat = nextFormat;
        }
      } catch (UnhandledAudioFormatException e) {
        throw new ConfigurationException(e);
      }
    }
    outputMode = OUTPUT_MODE_PCM;
    outputEncoding = outputFormat.encoding;
    outputSampleRate = outputFormat.sampleRate;
    outputChannelConfig = Util.getAudioTrackChannelConfig(outputFormat.channelCount);
    outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, outputFormat.channelCount);
  } else {
    // Encoded input: no audio processing is applied; choose offload or passthrough.
    inputPcmFrameSize = C.LENGTH_UNSET;
    availableAudioProcessors = new AudioProcessor[0];
    canApplyPlaybackParameters = false;
    outputSampleRate = inputFormat.sampleRate;
    outputPcmFrameSize = C.LENGTH_UNSET;
    if (enableOffload && isOffloadedPlaybackSupported(inputFormat, audioAttributes)) {
      outputMode = OUTPUT_MODE_OFFLOAD;
      outputEncoding =
          MimeTypes.getEncoding(
              Assertions.checkNotNull(inputFormat.sampleMimeType), inputFormat.codecs);
      outputChannelConfig = Util.getAudioTrackChannelConfig(inputFormat.channelCount);
    } else {
      outputMode = OUTPUT_MODE_PASSTHROUGH;
      @Nullable
      Pair<Integer, Integer> encodingAndChannelConfig =
          getEncodingAndChannelConfigForPassthrough(inputFormat, audioCapabilities);
      if (encodingAndChannelConfig == null) {
        throw new ConfigurationException("Unable to configure passthrough for: " + inputFormat);
      }
      outputEncoding = encodingAndChannelConfig.first;
      outputChannelConfig = encodingAndChannelConfig.second;
    }
  }
  // Validate the derived output encoding and channel config before committing.
  if (outputEncoding == C.ENCODING_INVALID) {
    throw new ConfigurationException(
        "Invalid output encoding (mode=" + outputMode + ") for: " + inputFormat);
  }
  if (outputChannelConfig == AudioFormat.CHANNEL_INVALID) {
    throw new ConfigurationException(
        "Invalid output channel config (mode=" + outputMode + ") for: " + inputFormat);
  }
  // A successful (re)configuration re-enables offload consideration.
  offloadDisabledUntilNextConfiguration = false;
  Configuration pendingConfiguration =
      new Configuration(
          inputFormat,
          inputPcmFrameSize,
          outputMode,
          outputPcmFrameSize,
          outputSampleRate,
          outputChannelConfig,
          outputEncoding,
          specifiedBufferSize,
          enableAudioTrackPlaybackParams,
          canApplyPlaybackParameters,
          availableAudioProcessors);
  if (isAudioTrackInitialized()) {
    // Defer applying the new configuration until the current track is released.
    this.pendingConfiguration = pendingConfiguration;
  } else {
    configuration = pendingConfiguration;
  }
}
/**
 * Rebuilds the active processor chain from the configuration's available processors, keeping
 * only those that report themselves active; inactive processors are flushed and excluded.
 */
private void setupAudioProcessors() {
  ArrayList<AudioProcessor> active = new ArrayList<>();
  for (AudioProcessor processor : configuration.availableAudioProcessors) {
    if (!processor.isActive()) {
      // Not needed for the current configuration; flush any state it holds.
      processor.flush();
      continue;
    }
    active.add(processor);
  }
  int activeCount = active.size();
  activeAudioProcessors = active.toArray(new AudioProcessor[activeCount]);
  outputBuffers = new ByteBuffer[activeCount];
  flushAudioProcessors();
}
/** Flushes each active processor and re-captures its (now empty) output buffer. */
private void flushAudioProcessors() {
  for (int i = 0; i < activeAudioProcessors.length; i++) {
    activeAudioProcessors[i].flush();
    outputBuffers[i] = activeAudioProcessors[i].getOutput();
  }
}
/**
 * Creates and configures a new {@link AudioTrack} for the current {@link #configuration}:
 * applies offload delay/padding where relevant, the pre-API-21 session keepalive workaround,
 * the position tracker, volume and aux effects.
 *
 * @throws InitializationException If the track could not be created.
 */
private void initializeAudioTrack() throws InitializationException {
  // If we're asynchronously releasing a previous audio track then we block until it has been
  // released. This guarantees that we cannot end up in a state where we have multiple audio
  // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
  // the shared memory that's available for audio track buffers. This would in turn cause the
  // initialization of the audio track to fail.
  releasingConditionVariable.block();
  audioTrack = buildAudioTrack();
  if (isOffloadedPlayback(audioTrack)) {
    // Register for stream events and pass the gapless delay/padding to the offloaded track.
    registerStreamEventCallbackV29(audioTrack);
    audioTrack.setOffloadDelayPadding(
        configuration.inputFormat.encoderDelay, configuration.inputFormat.encoderPadding);
  }
  int audioSessionId = audioTrack.getAudioSessionId();
  if (enablePreV21AudioSessionWorkaround) {
    if (Util.SDK_INT < 21) {
      // The workaround creates an audio track with a two byte buffer on the same session, and
      // does not release it until this object is released, which keeps the session active.
      if (keepSessionIdAudioTrack != null
          && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
        releaseKeepSessionIdAudioTrack();
      }
      if (keepSessionIdAudioTrack == null) {
        keepSessionIdAudioTrack = initializeKeepSessionIdAudioTrack(audioSessionId);
      }
    }
  }
  if (this.audioSessionId != audioSessionId) {
    // The platform assigned a different session id; remember it and notify the listener.
    this.audioSessionId = audioSessionId;
    if (listener != null) {
      listener.onAudioSessionId(audioSessionId);
    }
  }
  audioTrackPositionTracker.setAudioTrack(
      audioTrack,
      /* isPassthrough= */ configuration.outputMode == OUTPUT_MODE_PASSTHROUGH,
      configuration.outputEncoding,
      configuration.outputPcmFrameSize,
      configuration.bufferSize);
  setVolumeInternal();
  if (auxEffectInfo.effectId != AuxEffectInfo.NO_AUX_EFFECT_ID) {
    audioTrack.attachAuxEffect(auxEffectInfo.effectId);
    audioTrack.setAuxEffectSendLevel(auxEffectInfo.sendLevel);
  }
  startMediaTimeUsNeedsInit = true;
}
@Override
public void play() {
  // Record the playing state even without a track; handleBuffer starts playback once one
  // exists.
  playing = true;
  if (!isAudioTrackInitialized()) {
    return;
  }
  audioTrackPositionTracker.start();
  audioTrack.play();
}
@Override
public void handleDiscontinuity() {
  // Force resynchronization after a skipped buffer. The next handleBuffer call adjusts
  // startMediaTimeUs once pending processor data has drained.
  startMediaTimeUsNeedsSync = true;
}
// Handles one input buffer, returning whether it was fully consumed. In order: switches to a
// pending configuration once prior data has drained, lazily initializes the track, initializes
// or resynchronizes the start media time, then feeds the buffer through the processor chain.
@Override
@SuppressWarnings("ReferenceEquality")
public boolean handleBuffer(
    ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount)
    throws InitializationException, WriteException {
  // A buffer may be handed back repeatedly until fully consumed; it must be the same instance.
  Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
  if (pendingConfiguration != null) {
    if (!drainToEndOfStream()) {
      // There's still pending data in audio processors to write to the track.
      return false;
    } else if (!pendingConfiguration.canReuseAudioTrack(configuration)) {
      playPendingData();
      if (hasPendingData()) {
        // We're waiting for playout on the current audio track to finish.
        return false;
      }
      flush();
    } else {
      // The current audio track can be reused for the new configuration.
      configuration = pendingConfiguration;
      pendingConfiguration = null;
      if (isOffloadedPlayback(audioTrack)) {
        audioTrack.setOffloadEndOfStream();
        audioTrack.setOffloadDelayPadding(
            configuration.inputFormat.encoderDelay, configuration.inputFormat.encoderPadding);
        isWaitingForOffloadEndOfStreamHandled = true;
      }
    }
    // Re-apply playback parameters.
    applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
  }
  if (!isAudioTrackInitialized()) {
    initializeAudioTrack();
  }
  if (startMediaTimeUsNeedsInit) {
    // First buffer after configuration/flush: anchor the media start time here.
    startMediaTimeUs = max(0, presentationTimeUs);
    startMediaTimeUsNeedsSync = false;
    startMediaTimeUsNeedsInit = false;
    if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
      setAudioTrackPlaybackParametersV23(audioTrackPlaybackParameters);
    }
    applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
    if (playing) {
      play();
    }
  }
  if (!audioTrackPositionTracker.mayHandleBuffer(getWrittenFrames())) {
    return false;
  }
  if (inputBuffer == null) {
    // We are seeing this buffer for the first time.
    Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN);
    if (!buffer.hasRemaining()) {
      // The buffer is empty.
      return true;
    }
    if (configuration.outputMode != OUTPUT_MODE_PCM && framesPerEncodedSample == 0) {
      // If this is the first encoded sample, calculate the sample size in frames.
      framesPerEncodedSample = getFramesPerEncodedSample(configuration.outputEncoding, buffer);
      if (framesPerEncodedSample == 0) {
        // We still don't know the number of frames per sample, so drop the buffer.
        // For TrueHD this can occur after some seek operations, as not every sample starts with
        // a syncframe header. If we chunked samples together so the extracted samples always
        // started with a syncframe header, the chunks would be too large.
        return true;
      }
    }
    if (afterDrainParameters != null) {
      if (!drainToEndOfStream()) {
        // Don't process any more input until draining completes.
        return false;
      }
      applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
      afterDrainParameters = null;
    }
    // Check that presentationTimeUs is consistent with the expected value.
    long expectedPresentationTimeUs =
        startMediaTimeUs
            + configuration.inputFramesToDurationUs(
                getSubmittedFrames() - trimmingAudioProcessor.getTrimmedFrameCount());
    if (!startMediaTimeUsNeedsSync
        && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) {
      Log.e(
          TAG,
          "Discontinuity detected [expected "
              + expectedPresentationTimeUs
              + ", got "
              + presentationTimeUs
              + "]");
      startMediaTimeUsNeedsSync = true;
    }
    if (startMediaTimeUsNeedsSync) {
      if (!drainToEndOfStream()) {
        // Don't update timing until pending AudioProcessor buffers are completely drained.
        return false;
      }
      // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the
      // number of bytes submitted.
      long adjustmentUs = presentationTimeUs - expectedPresentationTimeUs;
      startMediaTimeUs += adjustmentUs;
      startMediaTimeUsNeedsSync = false;
      // Re-apply playback parameters because the startMediaTimeUs changed.
      applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs);
      if (listener != null && adjustmentUs != 0) {
        listener.onPositionDiscontinuity();
      }
    }
    // Account for the buffer in the submitted byte/frame counters.
    if (configuration.outputMode == OUTPUT_MODE_PCM) {
      submittedPcmBytes += buffer.remaining();
    } else {
      submittedEncodedFrames += framesPerEncodedSample * encodedAccessUnitCount;
    }
    inputBuffer = buffer;
    inputBufferAccessUnitCount = encodedAccessUnitCount;
  }
  processBuffers(presentationTimeUs);
  if (!inputBuffer.hasRemaining()) {
    inputBuffer = null;
    inputBufferAccessUnitCount = 0;
    return true;
  }
  if (audioTrackPositionTracker.isStalled(getWrittenFrames())) {
    Log.w(TAG, "Resetting stalled audio track");
    flush();
    return true;
  }
  return false;
}
/**
 * Builds an {@link AudioTrack} for the current configuration, reporting any failure to the
 * listener and disabling offload before rethrowing.
 */
private AudioTrack buildAudioTrack() throws InitializationException {
  try {
    Configuration config = Assertions.checkNotNull(configuration);
    return config.buildAudioTrack(tunneling, audioAttributes, audioSessionId);
  } catch (InitializationException e) {
    // Creation failed: disable offload until the next configure call and surface the error
    // before propagating it.
    maybeDisableOffload();
    if (listener != null) {
      listener.onAudioSinkError(e);
    }
    throw e;
  }
}
@RequiresApi(29)
private void registerStreamEventCallbackV29(AudioTrack audioTrack) {
  StreamEventCallbackV29 callback = offloadStreamEventCallbackV29;
  if (callback == null) {
    // Must be lazily initialized to receive stream event callbacks on the current (playback)
    // thread as the constructor is not called in the playback thread.
    callback = new StreamEventCallbackV29();
    offloadStreamEventCallbackV29 = callback;
  }
  callback.register(audioTrack);
}
/**
 * Feeds input through the active processor chain and writes fully processed output to the
 * {@link AudioTrack}.
 *
 * <p>{@code index} walks the chain: position {@code count} is the track itself, positions
 * {@code 0..count-1} are processors, and index {@code 0} draws from {@link #inputBuffer}. When
 * a stage produces output the index advances to propagate it; when a stage consumes all its
 * input the index retreats to pull more from upstream.
 *
 * @param avSyncPresentationTimeUs Presentation time for the tunneling A/V sync header, or
 *     {@link C#TIME_UNSET} if unavailable.
 * @throws WriteException If an error occurs writing to the track.
 */
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
  int count = activeAudioProcessors.length;
  int index = count;
  while (index >= 0) {
    ByteBuffer input = index > 0 ? outputBuffers[index - 1]
        : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
    if (index == count) {
      // End of the chain: write directly to the track.
      writeBuffer(input, avSyncPresentationTimeUs);
    } else {
      AudioProcessor audioProcessor = activeAudioProcessors[index];
      audioProcessor.queueInput(input);
      ByteBuffer output = audioProcessor.getOutput();
      outputBuffers[index] = output;
      if (output.hasRemaining()) {
        // Handle the output as input to the next audio processor or the AudioTrack.
        index++;
        continue;
      }
    }
    if (input.hasRemaining()) {
      // The input wasn't consumed and no output was produced, so give up for now.
      return;
    }
    // Get more input from upstream.
    index--;
  }
}
/**
 * Writes as much of {@code buffer} to the {@link AudioTrack} as possible without blocking,
 * keeping a reference in {@link #outputBuffer} until the buffer is fully written.
 *
 * @param avSyncPresentationTimeUs Presentation time for the A/V sync header (tunneling only),
 *     or {@link C#TIME_UNSET}.
 * @throws WriteException If the track returns an error code.
 */
@SuppressWarnings("ReferenceEquality")
private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throws WriteException {
  if (!buffer.hasRemaining()) {
    return;
  }
  if (outputBuffer != null) {
    // Retrying a previously incomplete write; it must be the same buffer instance.
    Assertions.checkArgument(outputBuffer == buffer);
  } else {
    outputBuffer = buffer;
    if (Util.SDK_INT < 21) {
      // Pre-API-21 AudioTrack.write only accepts byte arrays, so snapshot the buffer contents.
      int bytesRemaining = buffer.remaining();
      if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
        preV21OutputBuffer = new byte[bytesRemaining];
      }
      int originalPosition = buffer.position();
      buffer.get(preV21OutputBuffer, 0, bytesRemaining);
      buffer.position(originalPosition);
      preV21OutputBufferOffset = 0;
    }
  }
  int bytesRemaining = buffer.remaining();
  int bytesWrittenOrError = 0; // Error if negative
  if (Util.SDK_INT < 21) { // outputMode == OUTPUT_MODE_PCM.
    // Work out how many bytes we can write without the risk of blocking.
    int bytesToWrite = audioTrackPositionTracker.getAvailableBufferSize(writtenPcmBytes);
    if (bytesToWrite > 0) {
      bytesToWrite = min(bytesRemaining, bytesToWrite);
      bytesWrittenOrError =
          audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite);
      if (bytesWrittenOrError > 0) { // No error
        preV21OutputBufferOffset += bytesWrittenOrError;
        buffer.position(buffer.position() + bytesWrittenOrError);
      }
    }
  } else if (tunneling) {
    // Tunneling writes attach an A/V sync header, which requires a valid presentation time.
    Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET);
    bytesWrittenOrError =
        writeNonBlockingWithAvSyncV21(
            audioTrack, buffer, bytesRemaining, avSyncPresentationTimeUs);
  } else {
    bytesWrittenOrError = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
  }
  lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
  if (bytesWrittenOrError < 0) {
    int error = bytesWrittenOrError;
    boolean isRecoverable = isAudioTrackDeadObject(error);
    if (isRecoverable) {
      maybeDisableOffload();
    }
    WriteException e = new WriteException(error, isRecoverable);
    if (listener != null) {
      listener.onAudioSinkError(e);
    }
    throw e;
  }
  int bytesWritten = bytesWrittenOrError;
  if (isOffloadedPlayback(audioTrack)) {
    // After calling AudioTrack.setOffloadEndOfStream, the AudioTrack internally stops and
    // restarts during which AudioTrack.write will return 0. This situation must be detected to
    // prevent reporting the buffer as full even though it is not which could lead ExoPlayer to
    // sleep forever waiting for a onDataRequest that will never come.
    if (writtenEncodedFrames > 0) {
      isWaitingForOffloadEndOfStreamHandled = false;
    }
    // Consider the offload buffer as full if the AudioTrack is playing and AudioTrack.write could
    // not write all the data provided to it. This relies on the assumption that AudioTrack.write
    // always writes as much as possible.
    if (playing
        && listener != null
        && bytesWritten < bytesRemaining
        && !isWaitingForOffloadEndOfStreamHandled) {
      long pendingDurationMs =
          audioTrackPositionTracker.getPendingBufferDurationMs(writtenEncodedFrames);
      listener.onOffloadBufferFull(pendingDurationMs);
    }
  }
  if (configuration.outputMode == OUTPUT_MODE_PCM) {
    writtenPcmBytes += bytesWritten;
  }
  if (bytesWritten == bytesRemaining) {
    if (configuration.outputMode != OUTPUT_MODE_PCM) {
      // When playing non-PCM, the inputBuffer is never processed, thus the last inputBuffer
      // must be the current input buffer.
      Assertions.checkState(buffer == inputBuffer);
      writtenEncodedFrames += framesPerEncodedSample * inputBufferAccessUnitCount;
    }
    outputBuffer = null;
  }
}
@Override
public void playToEndOfStream() throws WriteException {
  // Nothing to do if the end of stream was already handled or no track exists.
  if (handledEndOfStream || !isAudioTrackInitialized()) {
    return;
  }
  if (drainToEndOfStream()) {
    playPendingData();
    handledEndOfStream = true;
  }
}
private void maybeDisableOffload() {
  if (configuration.outputModeIsOffload()) {
    // Offload was requested, but may not be available. There are cases when this can occur even
    // if AudioManager.isOffloadedPlaybackSupported returned true. For example, due to use of an
    // AudioPlaybackCaptureConfiguration. Disable offload until the sink is next configured.
    offloadDisabledUntilNextConfiguration = true;
  }
}
/** Returns whether {@code status} is a dead-object error from {@link AudioTrack#write}. */
private static boolean isAudioTrackDeadObject(int status) {
  // The native dead-object code applies on all API levels; AudioTrack.ERROR_DEAD_OBJECT was
  // only added in API 24.
  if (status == ERROR_NATIVE_DEAD_OBJECT) {
    return true;
  }
  return Util.SDK_INT >= 24 && status == AudioTrack.ERROR_DEAD_OBJECT;
}
/**
 * Queues end of stream through the active processor chain and drains remaining output to the
 * track. Progress is tracked in {@link #drainingAudioProcessorIndex} so draining can resume
 * across calls without re-queueing end of stream into processors that already received it.
 *
 * @return Whether draining completed.
 * @throws WriteException If an error occurs writing to the track.
 */
private boolean drainToEndOfStream() throws WriteException {
  boolean audioProcessorNeedsEndOfStream = false;
  if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
    // Start (or restart) draining from the first active processor.
    drainingAudioProcessorIndex = 0;
    audioProcessorNeedsEndOfStream = true;
  }
  while (drainingAudioProcessorIndex < activeAudioProcessors.length) {
    AudioProcessor audioProcessor = activeAudioProcessors[drainingAudioProcessorIndex];
    if (audioProcessorNeedsEndOfStream) {
      audioProcessor.queueEndOfStream();
    }
    processBuffers(C.TIME_UNSET);
    if (!audioProcessor.isEnded()) {
      // This processor still has output pending; resume from here on a later call.
      return false;
    }
    audioProcessorNeedsEndOfStream = true;
    drainingAudioProcessorIndex++;
  }
  // Finish writing any remaining output to the track.
  if (outputBuffer != null) {
    writeBuffer(outputBuffer, C.TIME_UNSET);
    if (outputBuffer != null) {
      return false;
    }
  }
  drainingAudioProcessorIndex = C.INDEX_UNSET;
  return true;
}
@Override
public boolean isEnded() {
  // With no track the sink is trivially ended; otherwise it must have handled end of stream
  // and played out all written data.
  if (!isAudioTrackInitialized()) {
    return true;
  }
  return handledEndOfStream && !hasPendingData();
}
@Override
public boolean hasPendingData() {
  if (!isAudioTrackInitialized()) {
    return false;
  }
  return audioTrackPositionTracker.hasPendingData(getWrittenFrames());
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
  // Clamp speed and pitch to the supported ranges before applying.
  float constrainedSpeed =
      Util.constrainValue(playbackParameters.speed, MIN_PLAYBACK_SPEED, MAX_PLAYBACK_SPEED);
  float constrainedPitch = Util.constrainValue(playbackParameters.pitch, MIN_PITCH, MAX_PITCH);
  PlaybackParameters constrained = new PlaybackParameters(constrainedSpeed, constrainedPitch);
  if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
    // Let the platform AudioTrack perform the speed/pitch adjustment.
    setAudioTrackPlaybackParametersV23(constrained);
  } else {
    // Otherwise apply speed/pitch via the audio processor chain.
    setAudioProcessorPlaybackParametersAndSkipSilence(constrained, getSkipSilenceEnabled());
  }
}
@Override
public PlaybackParameters getPlaybackParameters() {
  if (enableAudioTrackPlaybackParams) {
    // Parameters are applied by the platform track directly.
    return audioTrackPlaybackParameters;
  }
  return getAudioProcessorPlaybackParameters();
}
@Override
public void setSkipSilenceEnabled(boolean skipSilenceEnabled) {
  // Keep the current playback parameters; only the skip-silence flag changes.
  PlaybackParameters currentParameters = getAudioProcessorPlaybackParameters();
  setAudioProcessorPlaybackParametersAndSkipSilence(currentParameters, skipSilenceEnabled);
}
@Override
public boolean getSkipSilenceEnabled() {
  MediaPositionParameters parameters = getMediaPositionParameters();
  return parameters.skipSilence;
}
@Override
public void setAudioAttributes(AudioAttributes audioAttributes) {
  boolean unchanged = this.audioAttributes.equals(audioAttributes);
  if (unchanged) {
    return;
  }
  this.audioAttributes = audioAttributes;
  if (!tunneling) {
    // The audio attributes are ignored in tunneling mode, so only reset when not tunneling.
    flush();
    audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  }
}
@Override
public void setAudioSessionId(int audioSessionId) {
  if (this.audioSessionId == audioSessionId) {
    return;
  }
  // Changing the session requires a new AudioTrack; flush releases the current one.
  this.audioSessionId = audioSessionId;
  flush();
}
@Override
public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) {
  if (this.auxEffectInfo.equals(auxEffectInfo)) {
    return;
  }
  int newEffectId = auxEffectInfo.effectId;
  if (audioTrack != null) {
    if (this.auxEffectInfo.effectId != newEffectId) {
      // The effect itself changed; attach the new one to the current track.
      audioTrack.attachAuxEffect(newEffectId);
    }
    if (newEffectId != AuxEffectInfo.NO_AUX_EFFECT_ID) {
      audioTrack.setAuxEffectSendLevel(auxEffectInfo.sendLevel);
    }
  }
  this.auxEffectInfo = auxEffectInfo;
}
@Override
public void enableTunnelingV21(int tunnelingAudioSessionId) {
  Assertions.checkState(Util.SDK_INT >= 21);
  boolean alreadyConfigured = tunneling && audioSessionId == tunnelingAudioSessionId;
  if (!alreadyConfigured) {
    tunneling = true;
    audioSessionId = tunnelingAudioSessionId;
    // Switching into tunneling (or to a different session) needs a fresh AudioTrack.
    flush();
  }
}
@Override
public void disableTunneling() {
  if (!tunneling) {
    return;
  }
  tunneling = false;
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  // The tunneling track cannot be reused; flush releases it.
  flush();
}
@Override
public void setVolume(float volume) {
  if (this.volume == volume) {
    return;
  }
  this.volume = volume;
  setVolumeInternal();
}
private void setVolumeInternal() {
  if (!isAudioTrackInitialized()) {
    // No track yet; the volume is applied during initialization.
    return;
  }
  if (Util.SDK_INT >= 21) {
    setVolumeInternalV21(audioTrack, volume);
  } else {
    setVolumeInternalV3(audioTrack, volume);
  }
}
@Override
public void pause() {
  playing = false;
  if (isAudioTrackInitialized()) {
    // Only pause the platform track if the position tracker agrees it should be paused.
    if (audioTrackPositionTracker.pause()) {
      audioTrack.pause();
    }
  }
}
// Discards all pending audio and releases the current AudioTrack asynchronously; a new track
// is created lazily by the next call to handleBuffer.
@Override
public void flush() {
  if (isAudioTrackInitialized()) {
    resetSinkStateForFlush();
    if (audioTrackPositionTracker.isPlaying()) {
      audioTrack.pause();
    }
    if (isOffloadedPlayback(audioTrack)) {
      // The stream event callback was registered during initialization for offloaded playback.
      Assertions.checkNotNull(offloadStreamEventCallbackV29).unregister(audioTrack);
    }
    // AudioTrack.release can take some time, so we call it on a background thread.
    final AudioTrack toRelease = audioTrack;
    audioTrack = null;
    if (pendingConfiguration != null) {
      // Promote the pending configuration now that the old track is being discarded.
      configuration = pendingConfiguration;
      pendingConfiguration = null;
    }
    audioTrackPositionTracker.reset();
    // Closed here and re-opened by the release thread; initializeAudioTrack blocks on this
    // variable so a new track cannot be created until the old one has been released.
    releasingConditionVariable.close();
    new Thread("ExoPlayer:AudioTrackReleaseThread") {
      @Override
      public void run() {
        try {
          toRelease.flush();
          toRelease.release();
        } finally {
          releasingConditionVariable.open();
        }
      }
    }.start();
  }
}
@Override
public void experimentalFlushWithoutAudioTrackRelease() {
  // Prior to SDK 25, AudioTrack flush does not work as intended, and therefore it must be
  // released and reinitialized. (Internal reference: b/143500232)
  if (Util.SDK_INT < 25) {
    flush();
    return;
  }
  if (!isAudioTrackInitialized()) {
    return;
  }
  resetSinkStateForFlush();
  if (audioTrackPositionTracker.isPlaying()) {
    audioTrack.pause();
  }
  // Unlike flush(), keep the same AudioTrack instance: flush it in place and re-attach the
  // position tracker.
  audioTrack.flush();
  audioTrackPositionTracker.reset();
  audioTrackPositionTracker.setAudioTrack(
      audioTrack,
      /* isPassthrough= */ configuration.outputMode == OUTPUT_MODE_PASSTHROUGH,
      configuration.outputEncoding,
      configuration.outputPcmFrameSize,
      configuration.bufferSize);
  startMediaTimeUsNeedsInit = true;
}
@Override
public void reset() {
  flush();
  releaseKeepSessionIdAudioTrack();
  // Reset every processor in both chains, not only the currently active ones.
  for (AudioProcessor processor : toIntPcmAvailableAudioProcessors) {
    processor.reset();
  }
  for (AudioProcessor processor : toFloatPcmAvailableAudioProcessors) {
    processor.reset();
  }
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  playing = false;
  offloadDisabledUntilNextConfiguration = false;
}
// Internal methods.
/**
 * Resets all per-stream playback state (submitted/written counters, in-flight buffers,
 * position parameters and drain progress) ahead of a flush, then flushes the active audio
 * processors.
 */
private void resetSinkStateForFlush() {
  submittedPcmBytes = 0;
  submittedEncodedFrames = 0;
  writtenPcmBytes = 0;
  writtenEncodedFrames = 0;
  isWaitingForOffloadEndOfStreamHandled = false;
  framesPerEncodedSample = 0;
  // Re-anchor the media position at zero with the current parameters.
  mediaPositionParameters =
      new MediaPositionParameters(
          getAudioProcessorPlaybackParameters(),
          getSkipSilenceEnabled(),
          /* mediaTimeUs= */ 0,
          /* audioTrackPositionUs= */ 0);
  startMediaTimeUs = 0;
  afterDrainParameters = null;
  mediaPositionParametersCheckpoints.clear();
  inputBuffer = null;
  inputBufferAccessUnitCount = 0;
  outputBuffer = null;
  stoppedAudioTrack = false;
  handledEndOfStream = false;
  drainingAudioProcessorIndex = C.INDEX_UNSET;
  avSyncHeader = null;
  bytesUntilNextAvSync = 0;
  trimmingAudioProcessor.resetTrimmedFrameCount();
  flushAudioProcessors();
}
/** Releases {@link #keepSessionIdAudioTrack} asynchronously, if it is non-{@code null}. */
private void releaseKeepSessionIdAudioTrack() {
  if (keepSessionIdAudioTrack == null) {
    return;
  }
  // AudioTrack.release can take some time, so we call it on a background thread. Name the
  // thread for debuggability, matching the release thread created in flush().
  final AudioTrack toRelease = keepSessionIdAudioTrack;
  keepSessionIdAudioTrack = null;
  new Thread("ExoPlayer:AudioTrackReleaseThread") {
    @Override
    public void run() {
      toRelease.release();
    }
  }.start();
}
/** Applies the given playback parameters directly to the platform {@link AudioTrack} (API 23+). */
@RequiresApi(23)
private void setAudioTrackPlaybackParametersV23(PlaybackParameters audioTrackPlaybackParameters) {
  if (isAudioTrackInitialized()) {
    PlaybackParams playbackParams =
        new PlaybackParams()
            .allowDefaults()
            .setSpeed(audioTrackPlaybackParameters.speed)
            .setPitch(audioTrackPlaybackParameters.pitch)
            .setAudioFallbackMode(PlaybackParams.AUDIO_FALLBACK_MODE_FAIL);
    try {
      audioTrack.setPlaybackParams(playbackParams);
    } catch (IllegalArgumentException e) {
      // The platform rejected the parameters; keep going with whatever is in effect.
      Log.w(TAG, "Failed to set playback params", e);
    }
    // Update the speed using the actual effective speed from the audio track.
    audioTrackPlaybackParameters =
        new PlaybackParameters(
            audioTrack.getPlaybackParams().getSpeed(), audioTrack.getPlaybackParams().getPitch());
    audioTrackPositionTracker.setAudioTrackPlaybackSpeed(audioTrackPlaybackParameters.speed);
  }
  this.audioTrackPlaybackParameters = audioTrackPlaybackParameters;
}
/**
 * Stores new audio-processor playback parameters and skip-silence flag. If a track is active
 * they are deferred (via {@link #afterDrainParameters}) until pending data drains; otherwise
 * they take effect immediately.
 */
private void setAudioProcessorPlaybackParametersAndSkipSilence(
    PlaybackParameters playbackParameters, boolean skipSilence) {
  MediaPositionParameters currentMediaPositionParameters = getMediaPositionParameters();
  if (!playbackParameters.equals(currentMediaPositionParameters.playbackParameters)
      || skipSilence != currentMediaPositionParameters.skipSilence) {
    MediaPositionParameters mediaPositionParameters =
        new MediaPositionParameters(
            playbackParameters,
            skipSilence,
            /* mediaTimeUs= */ C.TIME_UNSET,
            /* audioTrackPositionUs= */ C.TIME_UNSET);
    if (isAudioTrackInitialized()) {
      // Drain the audio processors so we can determine the frame position at which the new
      // parameters apply.
      this.afterDrainParameters = mediaPositionParameters;
    } else {
      // Update the audio processor chain parameters now. They will be applied to the audio
      // processors during initialization.
      this.mediaPositionParameters = mediaPositionParameters;
    }
  }
}
/** Returns the playback parameters currently targeted by the audio processor chain. */
private PlaybackParameters getAudioProcessorPlaybackParameters() {
  MediaPositionParameters parameters = getMediaPositionParameters();
  return parameters.playbackParameters;
}
private MediaPositionParameters getMediaPositionParameters() {
  // Mask the already set parameters: prefer parameters queued to apply after a drain, then the
  // most recent checkpoint, then the currently applied parameters.
  if (afterDrainParameters != null) {
    return afterDrainParameters;
  }
  if (!mediaPositionParametersCheckpoints.isEmpty()) {
    return mediaPositionParametersCheckpoints.getLast();
  }
  return mediaPositionParameters;
}
/**
 * Applies the current playback parameters and skip-silence flag to the processor chain at the
 * given presentation time, records a media-position checkpoint, and rebuilds the active chain.
 */
private void applyAudioProcessorPlaybackParametersAndSkipSilence(long presentationTimeUs) {
  PlaybackParameters playbackParameters =
      configuration.canApplyPlaybackParameters
          ? audioProcessorChain.applyPlaybackParameters(getAudioProcessorPlaybackParameters())
          : PlaybackParameters.DEFAULT;
  boolean skipSilenceEnabled =
      configuration.canApplyPlaybackParameters
          ? audioProcessorChain.applySkipSilenceEnabled(getSkipSilenceEnabled())
          : DEFAULT_SKIP_SILENCE;
  // The checkpoint maps the media time to the track position at which these parameters apply.
  mediaPositionParametersCheckpoints.add(
      new MediaPositionParameters(
          playbackParameters,
          skipSilenceEnabled,
          /* mediaTimeUs= */ max(0, presentationTimeUs),
          /* audioTrackPositionUs= */ configuration.framesToDurationUs(getWrittenFrames())));
  setupAudioProcessors();
  if (listener != null) {
    listener.onSkipSilenceEnabledChanged(skipSilenceEnabled);
  }
}
/**
 * Applies and updates media position parameters.
 *
 * @param positionUs The current audio track position, in microseconds.
 * @return The current media time, in microseconds.
 */
private long applyMediaPositionParameters(long positionUs) {
  while (!mediaPositionParametersCheckpoints.isEmpty()
      && positionUs >= mediaPositionParametersCheckpoints.getFirst().audioTrackPositionUs) {
    // We are playing (or about to play) media with the new parameters, so update them.
    mediaPositionParameters = mediaPositionParametersCheckpoints.remove();
  }
  long playoutDurationSinceLastCheckpoint =
      positionUs - mediaPositionParameters.audioTrackPositionUs;
  if (!mediaPositionParameters.playbackParameters.equals(PlaybackParameters.DEFAULT)) {
    if (mediaPositionParametersCheckpoints.isEmpty()) {
      // Current parameters: let the processor chain map playout duration to media duration.
      playoutDurationSinceLastCheckpoint =
          audioProcessorChain.getMediaDuration(playoutDurationSinceLastCheckpoint);
    } else {
      // Playing data at a previous playback speed, so fall back to multiplying by the speed.
      playoutDurationSinceLastCheckpoint =
          Util.getMediaDurationForPlayoutDuration(
              playoutDurationSinceLastCheckpoint,
              mediaPositionParameters.playbackParameters.speed);
    }
  }
  return mediaPositionParameters.mediaTimeUs + playoutDurationSinceLastCheckpoint;
}
private long applySkipping(long positionUs) {
  // Advance the position by the duration of output frames the processor chain has skipped.
  long skippedDurationUs =
      configuration.framesToDurationUs(audioProcessorChain.getSkippedOutputFrameCount());
  return positionUs + skippedDurationUs;
}
/** Returns whether the sink currently holds an {@link AudioTrack}. */
private boolean isAudioTrackInitialized() {
  return audioTrack != null;
}
private long getSubmittedFrames() {
  // In PCM mode the frame count is derived from submitted bytes; otherwise encoded frames are
  // counted directly.
  if (configuration.outputMode == OUTPUT_MODE_PCM) {
    return submittedPcmBytes / configuration.inputPcmFrameSize;
  }
  return submittedEncodedFrames;
}
private long getWrittenFrames() {
  // In PCM mode the frame count is derived from written bytes; otherwise encoded frames are
  // counted directly.
  if (configuration.outputMode == OUTPUT_MODE_PCM) {
    return writtenPcmBytes / configuration.outputPcmFrameSize;
  }
  return writtenEncodedFrames;
}
private static boolean isPassthroughPlaybackSupported(
    Format format, @Nullable AudioCapabilities audioCapabilities) {
  // Passthrough is supported iff an encoding/channel-config pair can be resolved.
  Pair<Integer, Integer> encodingAndChannelConfig =
      getEncodingAndChannelConfigForPassthrough(format, audioCapabilities);
  return encodingAndChannelConfig != null;
}
/**
 * Returns the encoding and channel config to use when configuring an {@link AudioTrack} in
 * passthrough mode for the specified {@link Format}. Returns {@code null} if passthrough of the
 * format is unsupported.
 *
 * @param format The {@link Format}.
 * @param audioCapabilities The device audio capabilities.
 * @return The encoding and channel config to use, or {@code null} if passthrough of the format is
 *     unsupported.
 */
@Nullable
private static Pair<Integer, Integer> getEncodingAndChannelConfigForPassthrough(
    Format format, @Nullable AudioCapabilities audioCapabilities) {
  if (audioCapabilities == null) {
    // Without capability information passthrough cannot be attempted.
    return null;
  }
  @C.Encoding
  int encoding =
      MimeTypes.getEncoding(Assertions.checkNotNull(format.sampleMimeType), format.codecs);
  // Check for encodings that are known to work for passthrough with the implementation in this
  // class. This avoids trying to use passthrough with an encoding where the device/app reports
  // it's capable but it is untested or known to be broken (for example AAC-LC).
  boolean supportedEncoding =
      encoding == C.ENCODING_AC3
          || encoding == C.ENCODING_E_AC3
          || encoding == C.ENCODING_E_AC3_JOC
          || encoding == C.ENCODING_AC4
          || encoding == C.ENCODING_DTS
          || encoding == C.ENCODING_DTS_HD
          || encoding == C.ENCODING_DOLBY_TRUEHD;
  if (!supportedEncoding) {
    return null;
  }
  // E-AC3 JOC is object based, so any channel count specified in the format is arbitrary. Use 6,
  // since the E-AC3 compatible part of the stream is 5.1.
  int channelCount = encoding == C.ENCODING_E_AC3_JOC ? 6 : format.channelCount;
  if (channelCount > audioCapabilities.getMaxChannelCount()) {
    return null;
  }
  int channelConfig = getChannelConfigForPassthrough(channelCount);
  if (channelConfig == AudioFormat.CHANNEL_INVALID) {
    // The channel count doesn't map to any platform channel configuration.
    return null;
  }
  if (audioCapabilities.supportsEncoding(encoding)) {
    return Pair.create(encoding, channelConfig);
  } else if (encoding == C.ENCODING_E_AC3_JOC
      && audioCapabilities.supportsEncoding(C.ENCODING_E_AC3)) {
    // E-AC3 receivers support E-AC3 JOC streams (but decode in 2-D rather than 3-D).
    return Pair.create(C.ENCODING_E_AC3, channelConfig);
  }
  return null;
}
private static int getChannelConfigForPassthrough(int channelCount) {
  if (Util.SDK_INT <= 28) {
    // In passthrough mode the channel count used to configure the audio track doesn't affect
    // how the stream is handled, except that some devices do overly-strict channel
    // configuration checks. Therefore we override the channel count so that a known-working
    // channel configuration is chosen in all cases. See [Internal: b/29116190].
    switch (channelCount) {
      case 7:
        channelCount = 8;
        break;
      case 3:
      case 4:
      case 5:
        channelCount = 6;
        break;
      default:
        break;
    }
  }
  // Workaround for Nexus Player not reporting support for mono passthrough. See
  // [Internal: b/34268671].
  if (Util.SDK_INT <= 26 && "fugu".equals(Util.DEVICE) && channelCount == 1) {
    channelCount = 2;
  }
  return Util.getAudioTrackChannelConfig(channelCount);
}
/**
 * Returns whether offloaded playback of the given {@link Format} with the given
 * {@link AudioAttributes} is supported (requires API 29+). Gapless content (non-zero encoder
 * delay/padding) additionally requires {@link #isOffloadedGaplessPlaybackSupported()}.
 */
private static boolean isOffloadedPlaybackSupported(
    Format format, AudioAttributes audioAttributes) {
  if (Util.SDK_INT < 29) {
    return false;
  }
  @C.Encoding
  int encoding =
      MimeTypes.getEncoding(Assertions.checkNotNull(format.sampleMimeType), format.codecs);
  if (encoding == C.ENCODING_INVALID) {
    // The format's MIME type/codecs don't map to a platform encoding.
    return false;
  }
  int channelConfig = Util.getAudioTrackChannelConfig(format.channelCount);
  if (channelConfig == AudioFormat.CHANNEL_INVALID) {
    return false;
  }
  AudioFormat audioFormat = getAudioFormat(format.sampleRate, channelConfig, encoding);
  if (!AudioManager.isOffloadedPlaybackSupported(
      audioFormat, audioAttributes.getAudioAttributesV21())) {
    return false;
  }
  boolean notGapless = format.encoderDelay == 0 && format.encoderPadding == 0;
  return notGapless || isOffloadedGaplessPlaybackSupported();
}
private static boolean isOffloadedPlayback(AudioTrack audioTrack) {
  // AudioTrack.isOffloadedPlayback only exists from API 29.
  if (Util.SDK_INT < 29) {
    return false;
  }
  return audioTrack.isOffloadedPlayback();
}
/**
 * Returns whether the device supports gapless in offload playback.
 *
 * <p>Gapless offload is not supported by all devices and there is no API to query its support. As
 * a result this detection is currently based on manual testing.
 */
// TODO(internal b/158191844): Add an SDK API to query offload gapless support.
private static boolean isOffloadedGaplessPlaybackSupported() {
  boolean isAtLeastApi30 = Util.SDK_INT >= 30;
  return isAtLeastApi30 && Util.MODEL.startsWith("Pixel");
}
/**
 * Creates a minimal static {@link AudioTrack} whose only purpose is to keep the given audio
 * session active (the pre-API-21 workaround applied in {@link #initializeAudioTrack()}).
 */
private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
  int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
  int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
  @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
  int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
  return new AudioTrack(
      C.STREAM_TYPE_DEFAULT,
      sampleRate,
      channelConfig,
      encoding,
      bufferSize,
      AudioTrack.MODE_STATIC,
      audioSessionId);
}
/**
 * Returns the maximum rate, in bytes per second, for streams of the given compressed encoding.
 *
 * @param encoding The compressed {@link C.Encoding}.
 * @return The maximum rate in bytes per second.
 * @throws IllegalArgumentException If {@code encoding} is a PCM encoding, invalid, or otherwise
 *     not a supported compressed encoding.
 */
private static int getMaximumEncodedRateBytesPerSecond(@C.Encoding int encoding) {
  switch (encoding) {
    case C.ENCODING_MP3:
      return MpegAudioUtil.MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_LC:
      return AacUtil.AAC_LC_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_HE_V1:
      return AacUtil.AAC_HE_V1_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_HE_V2:
      return AacUtil.AAC_HE_V2_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_XHE:
      return AacUtil.AAC_XHE_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AAC_ELD:
      return AacUtil.AAC_ELD_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AC3:
      return Ac3Util.AC3_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_E_AC3:
    case C.ENCODING_E_AC3_JOC:
      return Ac3Util.E_AC3_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_AC4:
      return Ac4Util.MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_DTS:
      return DtsUtil.DTS_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_DTS_HD:
      return DtsUtil.DTS_HD_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_DOLBY_TRUEHD:
      return Ac3Util.TRUEHD_MAX_RATE_BYTES_PER_SECOND;
    case C.ENCODING_PCM_16BIT:
    case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
    case C.ENCODING_PCM_24BIT:
    case C.ENCODING_PCM_32BIT:
    case C.ENCODING_PCM_8BIT:
    case C.ENCODING_PCM_FLOAT:
    case C.ENCODING_AAC_ER_BSAC:
    case C.ENCODING_INVALID:
    case Format.NO_VALUE:
    default:
      // Include the encoding in the message (previously thrown without any detail), matching
      // the style of getFramesPerEncodedSample.
      throw new IllegalArgumentException("Unexpected audio encoding: " + encoding);
  }
}
/**
 * Returns the number of audio frames in one encoded access unit at the start of {@code buffer}
 * for the given encoding.
 *
 * @param encoding The compressed audio encoding.
 * @param buffer The buffer positioned at the start of an encoded access unit.
 * @return The number of audio frames per encoded sample.
 * @throws IllegalArgumentException If the MP3 frame header in the buffer cannot be parsed.
 * @throws IllegalStateException If the encoding is a PCM encoding, invalid or unsupported.
 */
private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) {
  switch (encoding) {
    case C.ENCODING_MP3:
      int headerDataInBigEndian = Util.getBigEndianInt(buffer, buffer.position());
      int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(headerDataInBigEndian);
      if (frameCount == C.LENGTH_UNSET) {
        // Previously this threw without a message; include the header bits to aid debugging.
        throw new IllegalArgumentException(
            "Unable to parse MP3 frame sample count from header: " + headerDataInBigEndian);
      }
      return frameCount;
    case C.ENCODING_AAC_LC:
      return AacUtil.AAC_LC_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_AAC_HE_V1:
    case C.ENCODING_AAC_HE_V2:
      return AacUtil.AAC_HE_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_AAC_XHE:
      return AacUtil.AAC_XHE_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_AAC_ELD:
      return AacUtil.AAC_LD_AUDIO_SAMPLE_COUNT;
    case C.ENCODING_DTS:
    case C.ENCODING_DTS_HD:
      return DtsUtil.parseDtsAudioSampleCount(buffer);
    case C.ENCODING_AC3:
    case C.ENCODING_E_AC3:
    case C.ENCODING_E_AC3_JOC:
      return Ac3Util.parseAc3SyncframeAudioSampleCount(buffer);
    case C.ENCODING_AC4:
      return Ac4Util.parseAc4SyncframeAudioSampleCount(buffer);
    case C.ENCODING_DOLBY_TRUEHD:
      int syncframeOffset = Ac3Util.findTrueHdSyncframeOffset(buffer);
      return syncframeOffset == C.INDEX_UNSET
          ? 0
          : (Ac3Util.parseTrueHdSyncframeAudioSampleCount(buffer, syncframeOffset)
              * Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT);
    case C.ENCODING_PCM_16BIT:
    case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
    case C.ENCODING_PCM_24BIT:
    case C.ENCODING_PCM_32BIT:
    case C.ENCODING_PCM_8BIT:
    case C.ENCODING_PCM_FLOAT:
    case C.ENCODING_AAC_ER_BSAC:
    case C.ENCODING_INVALID:
    case Format.NO_VALUE:
    default:
      throw new IllegalStateException("Unexpected audio encoding: " + encoding);
  }
}
/** Writes up to {@code size} bytes from {@code buffer} to the track without blocking. */
@RequiresApi(21)
private static int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) {
  return audioTrack.write(buffer, size, AudioTrack.WRITE_NON_BLOCKING);
}
/**
 * Writes up to {@code size} bytes from {@code buffer} without blocking, attaching an AV sync
 * header that carries {@code presentationTimeUs} (converted to nanoseconds on the wire).
 *
 * <p>On API 26+ the platform AudioTrack writes the header itself, so the timestamped write
 * overload is used directly. On API 21-25 a 16 byte header (magic word, payload size,
 * timestamp) is written manually ahead of each payload.
 *
 * @return The number of payload bytes written (0 while the header itself is still being
 *     written), or a negative AudioTrack error code.
 */
@RequiresApi(21)
private int writeNonBlockingWithAvSyncV21(
    AudioTrack audioTrack, ByteBuffer buffer, int size, long presentationTimeUs) {
  if (Util.SDK_INT >= 26) {
    // The underlying platform AudioTrack writes AV sync headers directly.
    return audioTrack.write(
        buffer, size, AudioTrack.WRITE_NON_BLOCKING, presentationTimeUs * 1000);
  }
  if (avSyncHeader == null) {
    // Lazily allocate the 16 byte header: 4 bytes magic word, 4 bytes size, 8 bytes timestamp.
    avSyncHeader = ByteBuffer.allocate(16);
    avSyncHeader.order(ByteOrder.BIG_ENDIAN);
    avSyncHeader.putInt(0x55550001);
  }
  if (bytesUntilNextAvSync == 0) {
    // Starting a new payload: fill in its size and presentation time (in nanoseconds).
    avSyncHeader.putInt(4, size);
    avSyncHeader.putLong(8, presentationTimeUs * 1000);
    avSyncHeader.position(0);
    bytesUntilNextAvSync = size;
  }
  int avSyncHeaderBytesRemaining = avSyncHeader.remaining();
  if (avSyncHeaderBytesRemaining > 0) {
    int result =
        audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, AudioTrack.WRITE_NON_BLOCKING);
    if (result < 0) {
      // Error: reset header state and propagate the error code.
      bytesUntilNextAvSync = 0;
      return result;
    }
    if (result < avSyncHeaderBytesRemaining) {
      // Header not fully written yet; report no payload progress this time.
      return 0;
    }
  }
  int result = writeNonBlockingV21(audioTrack, buffer, size);
  if (result < 0) {
    bytesUntilNextAvSync = 0;
    return result;
  }
  bytesUntilNextAvSync -= result;
  return result;
}
/** Sets the track volume using the API 21+ single-value volume method. */
@RequiresApi(21)
private static void setVolumeInternalV21(AudioTrack audioTrack, float volume) {
  audioTrack.setVolume(volume);
}
/** Sets the track volume by applying the same value to both stereo channels (pre-API-21 path). */
private static void setVolumeInternalV3(AudioTrack audioTrack, float volume) {
  audioTrack.setStereoVolume(volume, volume);
}
/**
 * Notifies the position tracker of the end of stream (with the final written frame count),
 * stops the audio track, and resets the AV sync header state. Idempotent via the
 * {@code stoppedAudioTrack} flag.
 */
private void playPendingData() {
  if (!stoppedAudioTrack) {
    stoppedAudioTrack = true;
    // The tracker needs the final frame count before the track is stopped.
    audioTrackPositionTracker.handleEndOfStream(getWrittenFrames());
    audioTrack.stop();
    bytesUntilNextAvSync = 0;
  }
}
/**
 * Forwards platform stream events (API 29+) from the current AudioTrack to the sink listener.
 * Callbacks are delivered through a Handler bound to the looper of the creating thread.
 */
@RequiresApi(29)
private final class StreamEventCallbackV29 extends AudioTrack.StreamEventCallback {

  private final Handler handler;

  public StreamEventCallbackV29() {
    handler = new Handler();
  }

  @Override
  public void onDataRequest(AudioTrack track, int size) {
    // Events must come from the track currently owned by the sink.
    Assertions.checkState(track == DefaultAudioSink.this.audioTrack);
    if (listener != null) {
      listener.onOffloadBufferEmptying();
    }
  }

  @Override
  public void onTearDown(@NonNull AudioTrack track) {
    if (listener != null && playing) {
      // A new Audio Track needs to be created and it's buffer filled, which will be done on the
      // next handleBuffer call.
      // Request this call explicitly in case ExoPlayer is sleeping waiting for a data request.
      listener.onOffloadBufferEmptying();
    }
  }

  /** Registers this callback on the given track, delivering events through the handler. */
  public void register(AudioTrack audioTrack) {
    audioTrack.registerStreamEventCallback(handler::post, this);
  }

  /** Unregisters this callback and drops any events still queued on the handler. */
  public void unregister(AudioTrack audioTrack) {
    audioTrack.unregisterStreamEventCallback(this);
    handler.removeCallbacksAndMessages(/* token= */ null);
  }
}
/** Stores parameters used to calculate the current media position. */
private static final class MediaPositionParameters {

  /** The playback parameters. */
  public final PlaybackParameters playbackParameters;
  /** Whether to skip silences. */
  public final boolean skipSilence;
  /** The media time from which the playback parameters apply, in microseconds. */
  public final long mediaTimeUs;
  /** The audio track position from which the playback parameters apply, in microseconds. */
  public final long audioTrackPositionUs;

  // Immutable value holder; the constructor is private so instances are only created by the
  // enclosing sink.
  private MediaPositionParameters(
      PlaybackParameters playbackParameters,
      boolean skipSilence,
      long mediaTimeUs,
      long audioTrackPositionUs) {
    this.playbackParameters = playbackParameters;
    this.skipSilence = skipSilence;
    this.mediaTimeUs = mediaTimeUs;
    this.audioTrackPositionUs = audioTrackPositionUs;
  }
}
/** Builds a platform {@link AudioFormat} from the given sample rate, channel mask and encoding. */
@RequiresApi(21)
private static AudioFormat getAudioFormat(int sampleRate, int channelConfig, int encoding) {
  AudioFormat.Builder formatBuilder =
      new AudioFormat.Builder()
          .setSampleRate(sampleRate)
          .setChannelMask(channelConfig)
          .setEncoding(encoding);
  return formatBuilder.build();
}
/** Receives position tracker callbacks and forwards them to the sink listener / logs. */
private final class PositionTrackerListener implements AudioTrackPositionTracker.Listener {

  @Override
  public void onPositionFramesMismatch(
      long audioTimestampPositionFrames,
      long audioTimestampSystemTimeUs,
      long systemTimeUs,
      long playbackPositionUs) {
    handleSpuriousAudioTimestamp(
        "Spurious audio timestamp (frame position mismatch): ",
        audioTimestampPositionFrames,
        audioTimestampSystemTimeUs,
        systemTimeUs,
        playbackPositionUs);
  }

  @Override
  public void onSystemTimeUsMismatch(
      long audioTimestampPositionFrames,
      long audioTimestampSystemTimeUs,
      long systemTimeUs,
      long playbackPositionUs) {
    handleSpuriousAudioTimestamp(
        "Spurious audio timestamp (system clock mismatch): ",
        audioTimestampPositionFrames,
        audioTimestampSystemTimeUs,
        systemTimeUs,
        playbackPositionUs);
  }

  @Override
  public void onInvalidLatency(long latencyUs) {
    Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs);
  }

  @Override
  public void onPositionAdvancing(long playoutStartSystemTimeMs) {
    if (listener != null) {
      listener.onPositionAdvancing(playoutStartSystemTimeMs);
    }
  }

  @Override
  public void onUnderrun(int bufferSize, long bufferSizeMs) {
    if (listener != null) {
      long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
      listener.onUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
    }
  }

  /**
   * Builds the diagnostic message shared by the two timestamp-mismatch callbacks (previously
   * duplicated in both) and either throws an {@link InvalidAudioTrackTimestampException} (when
   * {@code failOnSpuriousAudioTimestamp} is set) or logs it as a warning.
   */
  private void handleSpuriousAudioTimestamp(
      String messagePrefix,
      long audioTimestampPositionFrames,
      long audioTimestampSystemTimeUs,
      long systemTimeUs,
      long playbackPositionUs) {
    String message =
        messagePrefix
            + audioTimestampPositionFrames
            + ", "
            + audioTimestampSystemTimeUs
            + ", "
            + systemTimeUs
            + ", "
            + playbackPositionUs
            + ", "
            + getSubmittedFrames()
            + ", "
            + getWrittenFrames();
    if (failOnSpuriousAudioTimestamp) {
      throw new InvalidAudioTrackTimestampException(message);
    }
    Log.w(TAG, message);
  }
}
/** Stores configuration relating to the audio format. */
private static final class Configuration {

  public final Format inputFormat;
  /** Size of one input PCM frame in bytes. */
  public final int inputPcmFrameSize;
  /** One of the OUTPUT_MODE_* values (PCM, offload or passthrough). */
  @OutputMode public final int outputMode;
  public final int outputPcmFrameSize;
  public final int outputSampleRate;
  public final int outputChannelConfig;
  @C.Encoding public final int outputEncoding;
  /** AudioTrack buffer size in bytes; computed from the other values unless specified. */
  public final int bufferSize;
  public final boolean canApplyPlaybackParameters;
  public final AudioProcessor[] availableAudioProcessors;

  public Configuration(
      Format inputFormat,
      int inputPcmFrameSize,
      @OutputMode int outputMode,
      int outputPcmFrameSize,
      int outputSampleRate,
      int outputChannelConfig,
      int outputEncoding,
      int specifiedBufferSize,
      boolean enableAudioTrackPlaybackParams,
      boolean canApplyPlaybackParameters,
      AudioProcessor[] availableAudioProcessors) {
    this.inputFormat = inputFormat;
    this.inputPcmFrameSize = inputPcmFrameSize;
    this.outputMode = outputMode;
    this.outputPcmFrameSize = outputPcmFrameSize;
    this.outputSampleRate = outputSampleRate;
    this.outputChannelConfig = outputChannelConfig;
    this.outputEncoding = outputEncoding;
    this.canApplyPlaybackParameters = canApplyPlaybackParameters;
    this.availableAudioProcessors = availableAudioProcessors;
    // Call computeBufferSize() last as it depends on the other configuration values.
    this.bufferSize = computeBufferSize(specifiedBufferSize, enableAudioTrackPlaybackParams);
  }

  /** Returns if the configurations are sufficiently compatible to reuse the audio track. */
  public boolean canReuseAudioTrack(Configuration audioTrackConfiguration) {
    return audioTrackConfiguration.outputMode == outputMode
        && audioTrackConfiguration.outputEncoding == outputEncoding
        && audioTrackConfiguration.outputSampleRate == outputSampleRate
        && audioTrackConfiguration.outputChannelConfig == outputChannelConfig
        && audioTrackConfiguration.outputPcmFrameSize == outputPcmFrameSize;
  }

  /** Converts a number of input frames to a duration in microseconds. */
  public long inputFramesToDurationUs(long frameCount) {
    return (frameCount * C.MICROS_PER_SECOND) / inputFormat.sampleRate;
  }

  /** Converts a number of output frames to a duration in microseconds. */
  public long framesToDurationUs(long frameCount) {
    return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
  }

  /** Converts a duration in microseconds to a number of output frames. */
  public long durationUsToFrames(long durationUs) {
    return (durationUs * outputSampleRate) / C.MICROS_PER_SECOND;
  }

  /**
   * Creates an AudioTrack for this configuration and verifies it reached the initialized
   * state.
   *
   * @throws InitializationException If creation fails or the track does not initialize; it is
   *     marked recoverable when the output mode is offload.
   */
  public AudioTrack buildAudioTrack(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId)
      throws InitializationException {
    AudioTrack audioTrack;
    try {
      audioTrack = createAudioTrack(tunneling, audioAttributes, audioSessionId);
    } catch (UnsupportedOperationException | IllegalArgumentException e) {
      throw new InitializationException(
          AudioTrack.STATE_UNINITIALIZED,
          outputSampleRate,
          outputChannelConfig,
          bufferSize,
          /* isRecoverable= */ outputModeIsOffload(),
          e);
    }
    int state = audioTrack.getState();
    if (state != AudioTrack.STATE_INITIALIZED) {
      try {
        audioTrack.release();
      } catch (Exception e) {
        // The track has already failed to initialize, so it wouldn't be that surprising if
        // release were to fail too. Swallow the exception.
      }
      throw new InitializationException(
          state,
          outputSampleRate,
          outputChannelConfig,
          bufferSize,
          /* isRecoverable= */ outputModeIsOffload(),
          /* audioTrackException= */ null);
    }
    return audioTrack;
  }

  /** Dispatches track creation to the implementation matching the device API level. */
  private AudioTrack createAudioTrack(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) {
    if (Util.SDK_INT >= 29) {
      return createAudioTrackV29(tunneling, audioAttributes, audioSessionId);
    } else if (Util.SDK_INT >= 21) {
      return createAudioTrackV21(tunneling, audioAttributes, audioSessionId);
    } else {
      return createAudioTrackV9(audioAttributes, audioSessionId);
    }
  }

  // API 29+: builder-based creation; the only path that can enable offloaded playback.
  @RequiresApi(29)
  private AudioTrack createAudioTrackV29(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) {
    AudioFormat audioFormat =
        getAudioFormat(outputSampleRate, outputChannelConfig, outputEncoding);
    android.media.AudioAttributes audioTrackAttributes =
        getAudioTrackAttributesV21(audioAttributes, tunneling);
    return new AudioTrack.Builder()
        .setAudioAttributes(audioTrackAttributes)
        .setAudioFormat(audioFormat)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .setBufferSizeInBytes(bufferSize)
        .setSessionId(audioSessionId)
        .setOffloadedPlayback(outputMode == OUTPUT_MODE_OFFLOAD)
        .build();
  }

  // API 21-28: attributes/format constructor.
  @RequiresApi(21)
  private AudioTrack createAudioTrackV21(
      boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) {
    return new AudioTrack(
        getAudioTrackAttributesV21(audioAttributes, tunneling),
        getAudioFormat(outputSampleRate, outputChannelConfig, outputEncoding),
        bufferSize,
        AudioTrack.MODE_STREAM,
        audioSessionId);
  }

  // Pre-21: legacy stream-type constructor.
  private AudioTrack createAudioTrackV9(AudioAttributes audioAttributes, int audioSessionId) {
    int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage);
    if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
      return new AudioTrack(
          streamType,
          outputSampleRate,
          outputChannelConfig,
          outputEncoding,
          bufferSize,
          AudioTrack.MODE_STREAM);
    } else {
      // Re-attach to the same audio session.
      return new AudioTrack(
          streamType,
          outputSampleRate,
          outputChannelConfig,
          outputEncoding,
          bufferSize,
          AudioTrack.MODE_STREAM,
          audioSessionId);
    }
  }

  /** Returns the explicitly specified buffer size, or a default derived from the output mode. */
  private int computeBufferSize(
      int specifiedBufferSize, boolean enableAudioTrackPlaybackParameters) {
    if (specifiedBufferSize != 0) {
      return specifiedBufferSize;
    }
    switch (outputMode) {
      case OUTPUT_MODE_PCM:
        // When track playback parameters are enabled, size for the maximum playback speed.
        return getPcmDefaultBufferSize(
            enableAudioTrackPlaybackParameters ? MAX_PLAYBACK_SPEED : DEFAULT_PLAYBACK_SPEED);
      case OUTPUT_MODE_OFFLOAD:
        return getEncodedDefaultBufferSize(OFFLOAD_BUFFER_DURATION_US);
      case OUTPUT_MODE_PASSTHROUGH:
        return getEncodedDefaultBufferSize(PASSTHROUGH_BUFFER_DURATION_US);
      default:
        throw new IllegalStateException();
    }
  }

  /** Returns a buffer size holding the given duration at the encoding's maximum byte rate. */
  private int getEncodedDefaultBufferSize(long bufferDurationUs) {
    int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding);
    if (outputEncoding == C.ENCODING_AC3) {
      rate *= AC3_BUFFER_MULTIPLICATION_FACTOR;
    }
    return (int) (bufferDurationUs * rate / C.MICROS_PER_SECOND);
  }

  /**
   * Returns a PCM buffer size: a multiple of the platform minimum, constrained between the
   * configured min/max durations, then scaled for the maximum playback speed.
   */
  private int getPcmDefaultBufferSize(float maxAudioTrackPlaybackSpeed) {
    int minBufferSize =
        AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding);
    Assertions.checkState(minBufferSize != AudioTrack.ERROR_BAD_VALUE);
    int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
    int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
    int maxAppBufferSize =
        max(minBufferSize, (int) durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
    int bufferSize =
        Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
    if (maxAudioTrackPlaybackSpeed != 1f) {
      // Maintain the buffer duration by scaling the size accordingly.
      bufferSize = Math.round(bufferSize * maxAudioTrackPlaybackSpeed);
    }
    return bufferSize;
  }

  /** Returns platform attributes: tunneling attributes when tunneling, else the converted ones. */
  @RequiresApi(21)
  private static android.media.AudioAttributes getAudioTrackAttributesV21(
      AudioAttributes audioAttributes, boolean tunneling) {
    if (tunneling) {
      return getAudioTrackTunnelingAttributesV21();
    } else {
      return audioAttributes.getAudioAttributesV21();
    }
  }

  // Attributes used for tunneled playback; sets the hardware AV sync flag.
  @RequiresApi(21)
  private static android.media.AudioAttributes getAudioTrackTunnelingAttributesV21() {
    return new android.media.AudioAttributes.Builder()
        .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE)
        .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC)
        .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
        .build();
  }

  /** Returns whether the output mode is audio offload. */
  public boolean outputModeIsOffload() {
    return outputMode == OUTPUT_MODE_OFFLOAD;
  }
}
}
| Clarify offload stream event callback impl
#exo-offload
PiperOrigin-RevId: 333532900
| library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java | Clarify offload stream event callback impl |
|
Java | apache-2.0 | 629ac1738c786f2bcc197a32b825865b414e0912 | 0 | raydac/netbeans-mmd-plugin,raydac/netbeans-mmd-plugin,raydac/netbeans-mmd-plugin,raydac/netbeans-mmd-plugin | /*
* Copyright (C) 2018 Igor Maznitsa.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package com.igormaznitsa.sciareto.ui.tree;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.DropMode;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import com.igormaznitsa.meta.common.utils.GetUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import com.igormaznitsa.meta.annotation.MustNotContainNull;
import com.igormaznitsa.meta.common.utils.Assertions;
import com.igormaznitsa.mindmap.model.ExtraFile;
import com.igormaznitsa.mindmap.model.MMapURI;
import com.igormaznitsa.mindmap.model.MindMap;
import com.igormaznitsa.mindmap.model.Topic;
import com.igormaznitsa.mindmap.model.logger.Logger;
import com.igormaznitsa.mindmap.model.logger.LoggerFactory;
import com.igormaznitsa.mindmap.swing.panel.MindMapPanel;
import com.igormaznitsa.mindmap.swing.panel.utils.Utils;
import com.igormaznitsa.sciareto.Context;
import com.igormaznitsa.sciareto.Main;
import com.igormaznitsa.sciareto.preferences.PrefUtils;
import com.igormaznitsa.sciareto.ui.DialogProviderManager;
import com.igormaznitsa.sciareto.ui.FindFilesForTextPanel;
import com.igormaznitsa.sciareto.ui.FindUsagesPanel;
import com.igormaznitsa.sciareto.ui.MainFrame;
import com.igormaznitsa.sciareto.ui.UiUtils;
import com.igormaznitsa.sciareto.ui.editors.EditorContentType;
import com.igormaznitsa.sciareto.ui.editors.MMDEditor;
import com.igormaznitsa.sciareto.ui.tabs.TabTitle;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JTree;
public final class ExplorerTree extends JScrollPane {
private static final long serialVersionUID = 3894835807758698784L;
private static final Logger LOGGER = LoggerFactory.getLogger(ExplorerTree.class);
// The drag-and-drop enabled tree wrapped by this scroll pane.
private final DnDTree projectTree;
private final Context context;
// Icon used by the "Cancel and remove" item of the loading-project popup menu.
private static final Icon CROSS_ICON = new ImageIcon(UiUtils.loadIcon("nimbusCloseFrame.png"));
/**
 * Creates the explorer tree component: a scroll pane wrapping a DnD-enabled project tree.
 * Installs single-selection mode, the cell renderer, tooltips, and keyboard/mouse handlers
 * for opening files and showing context popup menus.
 *
 * @param context the application context, must not be null
 * @throws IOException if initialization fails
 */
public ExplorerTree(@Nonnull final Context context) throws IOException {
  super();
  this.projectTree = new DnDTree();
  this.context = context;
  // Only one node may be selected at a time.
  this.projectTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
  this.projectTree.setDropMode(DropMode.ON);
  this.projectTree.setEditable(true);
  ToolTipManager.sharedInstance().registerComponent(this.projectTree);
  this.projectTree.setCellRenderer(new TreeCellRenderer());
  this.projectTree.setModel(new NodeProjectGroup(context, ".")); //NOI18N
  this.projectTree.setRootVisible(false);
  this.setViewportView(this.projectTree);
  // ENTER opens the selected leaf (as a tab, or in the system viewer) or expands a folder.
  this.projectTree.addKeyListener(new KeyAdapter() {
    @Override
    public void keyPressed(@Nonnull final KeyEvent e) {
      if (!e.isConsumed() && e.getModifiers() == 0 && e.getKeyCode() == KeyEvent.VK_ENTER) {
        e.consume();
        final TreePath selectedPath = projectTree.getSelectionPath();
        if (selectedPath != null) {
          final NodeFileOrFolder node = (NodeFileOrFolder) selectedPath.getLastPathComponent();
          if (node != null) {
            if (!node.isLoading() && node.isLeaf()) {
              final File file = node.makeFileForNode();
              // If the file can't be opened as an internal tab, fall back to the OS viewer.
              if (file != null && !context.openFileAsTab(file)) {
                UiUtils.openInSystemViewer(file);
              }
            } else {
              projectTree.expandPath(selectedPath);
            }
          }
        }
      }
    }
  });
  this.projectTree.addMouseListener(new MouseAdapter() {
    // Double click opens a leaf file, mirroring the ENTER key behavior.
    @Override
    public void mouseClicked(@Nonnull final MouseEvent e) {
      if (e.getClickCount() > 1) {
        final int selRow = projectTree.getRowForLocation(e.getX(), e.getY());
        final TreePath selPath = projectTree.getPathForLocation(e.getX(), e.getY());
        if (selRow >= 0) {
          final NodeFileOrFolder node = (NodeFileOrFolder) selPath.getLastPathComponent();
          if (node != null && !node.isLoading() && node.isLeaf()) {
            final File file = node.makeFileForNode();
            if (file != null) {
              if (context.openFileAsTab(file)) {
                SwingUtilities.invokeLater(new Runnable() {
                  @Override
                  public void run() {
                    context.centerRootTopicIfFocusedMMD();
                  }
                });
              } else {
                UiUtils.openInSystemViewer(file);
              }
            }
          }
        }
      }
    }
    // Popup trigger is checked on both press and release because it is platform dependent.
    @Override
    public void mouseReleased(@Nonnull final MouseEvent e) {
      if (e.isPopupTrigger()) {
        processPopup(e);
      }
    }
    @Override
    public void mousePressed(@Nonnull final MouseEvent e) {
      if (e.isPopupTrigger()) {
        processPopup(e);
      }
    }
    // Selects the node under the cursor and shows the context menu appropriate for its state.
    private void processPopup(@Nonnull final MouseEvent e) {
      final TreePath selPath = projectTree.getPathForLocation(e.getX(), e.getY());
      if (selPath != null) {
        projectTree.setSelectionPath(selPath);
        final Object last = selPath.getLastPathComponent();
        if (last instanceof NodeFileOrFolder) {
          if (((NodeFileOrFolder)last).isLoading()) {
            makePopupMenuForLoading((NodeFileOrFolder) last).show(e.getComponent(), e.getX(), e.getY());
          } else {
            makePopupMenu((NodeFileOrFolder) last).show(e.getComponent(), e.getX(), e.getY());
          }
        }
      }
    }
  });
}
/** Returns the wrapped project tree component, never null. */
@Nonnull
public JTree getProjectTree() {
  return this.projectTree;
}
/** Returns true if some tree item is currently selected. */
public boolean hasSelectedItem() {
  return this.projectTree.getSelectionPath() != null;
}
/**
 * Shows the context popup menu for the currently selected tree item, centered on its row.
 * Does nothing when nothing is selected.
 */
public void showPopUpForSelectedItem() {
  final TreePath selectedPath = this.projectTree.getSelectionPath();
  if (selectedPath == null) {
    return;
  }
  final NodeFileOrFolder selectedNode = (NodeFileOrFolder) selectedPath.getLastPathComponent();
  final Rectangle rowBounds = this.projectTree.getRowBounds(this.projectTree.getRowForPath(selectedPath));
  final JPopupMenu menu;
  if (selectedNode.isLoading()) {
    menu = makePopupMenuForLoading(selectedNode);
  } else {
    menu = makePopupMenu(selectedNode);
  }
  menu.show(this.projectTree, rowBounds.x + rowBounds.width / 2, rowBounds.y + rowBounds.height / 2);
}
/** Forwards focus requests to the inner tree component instead of the scroll pane itself. */
@Override
public void requestFocus() {
  this.projectTree.requestFocus();
}
/**
 * Finds nodes in the current project group matching the name pattern (delegated to the group).
 *
 * @param namePattern the pattern to match, can be null
 * @return list of found nodes, never null
 */
@Nonnull
@MustNotContainNull
public List<NodeFileOrFolder> findForNamePattern(@Nullable final Pattern namePattern) {
  return getCurrentGroup().findForNamePattern(namePattern);
}
/**
 * Collects all tree nodes related to the given file across the current project group.
 *
 * @param file the file to look up, must not be null
 * @return list of matching nodes, never null
 */
@Nonnull
@MustNotContainNull
public List<NodeFileOrFolder> findNodesForFile(@Nonnull final File file) {
  final List<NodeFileOrFolder> accumulator = new ArrayList<>();
  return getCurrentGroup().findRelatedNodes(file, accumulator);
}
/**
 * Removes the project from the tree model and notifies the context that it was closed.
 *
 * <p>NOTE(review): the first letter of this method name appears to be a non-Latin homoglyph
 * of 'c' (likely Cyrillic U+0441) — confirm with a hex dump. Renaming would touch all call
 * sites, so it is only flagged here.
 *
 * @param tree the project node to remove, must not be null
 */
public void сloseProject(@Nonnull final NodeProject tree) {
  ((NodeProjectGroup) this.projectTree.getModel()).removeProject(tree);
  this.context.onCloseProject(tree);
}
/** Moves the selection to the first element of the tree (delegated to the tree component). */
public void focusToFirstElement() {
  this.projectTree.focusToFirstElement();
}
/**
 * Selects and scrolls to the tree item representing the given file, if the file is present
 * in the current project group.
 */
public void focusToFileItem(@Nonnull final File file) {
  final NodeProjectGroup group = getCurrentGroup();
  final TreePath path = group.findPathToFile(file);
  if (path == null) {
    return;
  }
  this.projectTree.setSelectionPath(path);
  this.projectTree.scrollPathToVisible(path);
}
/** Expands the given project node in the tree, scheduled safely onto the Swing thread. */
public void unfoldProject(@Nonnull final NodeProject node) {
  Utils.safeSwingCall(new Runnable() {
    @Override
    public void run() {
      projectTree.expandPath(new TreePath(new Object[]{getCurrentGroup(), node}));
    }
  });
}
/**
 * Builds the context menu shown for a node whose content is still loading. For a project node
 * it offers a single item that cancels the loading and removes the project from the tree;
 * for other nodes the menu is empty.
 */
@Nonnull
private JPopupMenu makePopupMenuForLoading(@Nonnull final NodeFileOrFolder node) {
  final JPopupMenu menu = new JPopupMenu();
  if (!(node instanceof NodeProject)) {
    return menu;
  }
  final JMenuItem cancelAndRemove = new JMenuItem("Cancel and remove", CROSS_ICON);
  cancelAndRemove.addActionListener(new ActionListener() {
    @Override
    public void actionPerformed(@Nonnull final ActionEvent e) {
      ((NodeProject) node).cancel();
      сloseProject((NodeProject) node);
    }
  });
  menu.add(cancelAndRemove);
  return menu;
}
/**
 * Builds the context popup menu for a tree node. The content depends on the node kind:
 * folders get a "New..." sub-menu, project nodes get close/reload/knowledge-folder items,
 * mind map leaves get a link-graph item, and all nodes get rename/delete/open/search actions.
 */
@Nonnull
private JPopupMenu makePopupMenu(@Nonnull final NodeFileOrFolder node) {
  final JPopupMenu result = new JPopupMenu();
  // "New..." sub-menu for folders: child folder, mind map, text and PlantUML files.
  if (!node.isLeaf()) {
    final JMenu makeNew = new JMenu("New...");
    JMenuItem item = new JMenuItem("Folder");
    item.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        // Null extension means "create a folder".
        addChildTo(node, null);
      }
    });
    makeNew.add(item);
    item = new JMenuItem("Mind map");
    item.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        addChildTo(node, "mmd"); //NOI18N
      }
    });
    makeNew.add(item);
    item = new JMenuItem("Text");
    item.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        addChildTo(node, "txt"); //NOI18N
      }
    });
    makeNew.add(item);
    item = new JMenuItem("PlantUML");
    item.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        addChildTo(node, "puml"); //NOI18N
      }
    });
    makeNew.add(item);
    result.add(makeNew);
  }
  // The project knowledge folder is not renamable.
  if (!node.isProjectKnowledgeFolder()) {
    final JMenuItem rename = new JMenuItem("Rename");
    rename.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        projectTree.startEditingAtPath(node.makeTreePath());
      }
    });
    result.add(rename);
  }
  // Project-only items: close (remove from the tree) and reload from disk.
  if (node instanceof NodeProject) {
    final JMenuItem close = new JMenuItem("Close");
    close.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        if (DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(Main.getApplicationFrame(), "Close '" + node + '\'', "Do you really want close '" + node + "\'?\nIt will be just removed from the tree.")) {
          сloseProject((NodeProject) node);
        }
      }
    });
    result.add(close);
    final JMenuItem refresh = new JMenuItem("Reload");
    refresh.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        getCurrentGroup().startProjectFolderRefresh((NodeProject) node);
      }
    });
    result.add(refresh);
  }
  final JMenuItem delete = new JMenuItem("Delete");
  delete.addActionListener(new ActionListener() {
    @Override
    public void actionPerformed(@Nonnull final ActionEvent e) {
      if (DialogProviderManager.getInstance().getDialogProvider().msgConfirmYesNo(null, "Delete", "Do you really want to delete \"" + node + "\"?")) {
        context.deleteTreeNode(node);
      }
    }
  });
  result.add(delete);
  final JMenuItem openInSystem = new JMenuItem("Open in System");
  openInSystem.addActionListener(new ActionListener() {
    @Override
    public void actionPerformed(@Nonnull final ActionEvent e) {
      final File file = node.makeFileForNode();
      if (file != null && file.exists()) {
        UiUtils.openInSystemViewer(file);
      }
    }
  });
  result.add(openInSystem);
  // Offer creation of the knowledge folder when a project doesn't have one yet.
  if (node instanceof NodeProject) {
    final NodeProject theProject = (NodeProject) node;
    if (!theProject.hasKnowledgeFolder()) {
      final File knowledgeFolder = new File(theProject.getFolder(), Context.KNOWLEDGE_FOLDER);
      final JMenuItem addKnowledgeFolder = new JMenuItem("Create Knowledge folder");
      addKnowledgeFolder.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(@Nonnull final ActionEvent e) {
          if (knowledgeFolder.mkdirs()) {
            // Refresh the project and then focus the freshly created folder in the tree.
            getCurrentGroup().startProjectFolderRefresh(theProject, new Runnable() {
              @Override
              public void run() {
                context.focusInTree(knowledgeFolder);
              }
            });
          } else {
            LOGGER.error("Can't create knowledge folder : " + knowledgeFolder); //NOI18N
          }
        }
      });
      result.add(addKnowledgeFolder);
    }
  }
  // File-links graph: for a whole project, or for a single mind map leaf.
  final String BUILD_GRAPH_ITEM = "Build file links graph";
  if (node instanceof NodeProject) {
    final JMenuItem buildMindMapGraph = new JMenuItem(BUILD_GRAPH_ITEM);
    buildMindMapGraph.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        context.showGraphMindMapFileLinksDialog(((NodeProject) node).getFolder(), null, true);
      }
    });
    result.add(buildMindMapGraph);
  } else if (node.isLeaf() && node.isMindMapFile()) {
    final JMenuItem buildMindMapGraph = new JMenuItem(BUILD_GRAPH_ITEM);
    buildMindMapGraph.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        final NodeProject project = node.findProject();
        context.showGraphMindMapFileLinksDialog(project == null ? null : project.getFolder(), node.makeFileForNode(), true);
      }
    });
    result.add(buildMindMapGraph);
  }
  // Optional items are collected first so they can be placed after a separator.
  final List<JMenuItem> optional = new ArrayList<>();
  final JMenuItem menuSearchUsage = new JMenuItem("Find in maps");
  menuSearchUsage.addActionListener(new ActionListener() {
    @Override
    public void actionPerformed(@Nonnull final ActionEvent e) {
      // Warn about unsaved tabs, since only saved content is searched.
      if (context.hasUnsavedDocument() && !DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "Detected unsaved documents", "Unsaved content will not be processed!")) {
        return;
      }
      final FindUsagesPanel panel = new FindUsagesPanel(context, node, false);
      if (DialogProviderManager.getInstance().getDialogProvider().msgOkCancel(null, "Find usages in all opened projects", panel)) {
        final NodeFileOrFolder selected = panel.getSelected();
        panel.dispose();
        if (selected != null) {
          final File file = selected.makeFileForNode();
          if (file != null) {
            context.focusInTree(file);
            SwingUtilities.invokeLater(new Runnable() {
              @Override
              public void run() {
                requestFocus();
              }
            });
          }
        }
      } else {
        panel.dispose();
      }
    }
  });
  optional.add(menuSearchUsage);
  if (!node.isLeaf()) {
    final JMenuItem menuSearchFilesForText = new JMenuItem("Find files for text");
    menuSearchFilesForText.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final FindFilesForTextPanel panel = new FindFilesForTextPanel(context, node, JOptionPane.OK_OPTION);
        if (DialogProviderManager.getInstance().getDialogProvider().msgOkCancel(null, "Find files for text in '" + node.name + '\'', panel)) {
          final NodeFileOrFolder selected = panel.getSelected();
          panel.dispose();
          if (selected != null) {
            final File file = selected.makeFileForNode();
            if (file != null) {
              context.focusInTree(file);
              SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                  requestFocus();
                }
              });
            }
          }
        } else {
          panel.dispose();
        }
      }
    });
    optional.add(menuSearchFilesForText);
  }
  // When a mind map editor tab is focused, allow adding this node (and its subtree) as topics.
  final TabTitle editingTab = this.context.getFocusedTab();
  if (editingTab != null && editingTab.getType() == EditorContentType.MINDMAP) {
    final JMenuItem addIntoMap = new JMenuItem("Add File as topic");
    addIntoMap.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull final ActionEvent e) {
        addTreeAsTopic(context.findProjectForFile(editingTab.getAssociatedFile()), node, ((MMDEditor) editingTab.getProvider().getEditor()));
      }
    });
    optional.add(addIntoMap);
  }
  if (!optional.isEmpty()) {
    result.add(new JSeparator());
    for (final JMenuItem i : optional) {
      result.add(i);
    }
  }
  return result;
}
/**
 * Adds the file tree rooted at the given node into the currently edited mind map as topics
 * with file-link extras attached. Asks for confirmation in ambiguous situations (node from a
 * different project, multiple selected parents, or no selected parent).
 *
 * @param project the project owning the edited map, can be null (file paths then not relative)
 * @param node the root node of the subtree to convert, must not be null
 * @param editor the mind map editor whose model is modified, must not be null
 */
private void addTreeAsTopic(@Nullable final NodeProject project, @Nonnull final NodeFileOrFolder node, @Nonnull final MMDEditor editor) {
  final File projectFolder = project == null ? null : project.getFolder();
  // Warn if the node belongs to another project: file links can't be made relative then.
  if (project != null) {
    if (node.findProject() != project) {
      if (!DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "Different projects", "Opened Map file from another project. File paths will not be relative ones.")) {
        return;
      }
    }
  }
  final List<Topic> targetTopics = new ArrayList<>(Arrays.asList(editor.getMindMapPanel().getSelectedTopics()));
  if (targetTopics.size() > 1) {
    if (!DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "Multiple selection detected", "New children will be generated for all focused topics.")) {
      return;
    }
  } else {
    // No selection: fall back to the root topic, with confirmation.
    if (targetTopics.isEmpty() && editor.getMindMapPanel().getModel().getRoot() != null) {
      if (!DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "No selected parent", "There is not selected topic. The Root will be used as the parent.")) {
        return;
      }
      targetTopics.add(editor.getMindMapPanel().getModel().getRoot());
    }
  }
  editor.getMindMapPanel().executeModelJobs(new MindMapPanel.ModelJob() {
    // Recursively creates a topic (with a file-link extra) for the node and all its children.
    @Nonnull
    private Topic recursiveGenerateTopics(@Nullable final File projectFolder, @Nonnull final MindMap model, @Nullable final Topic parent, @Nonnull final NodeFileOrFolder node) {
      final ExtraFile fileLink = new ExtraFile(new MMapURI(projectFolder, node.makeFileForNode(), null));
      final Topic theTopic;
      if (parent == null) {
        theTopic = new Topic(model, null, node.toString(), fileLink);
      } else {
        theTopic = parent.makeChild(node.toString(), null);
        theTopic.setExtra(fileLink);
      }
      if (!node.isLeaf()) {
        final Enumeration<NodeFileOrFolder> children = node.children();
        while (children.hasMoreElements()) {
          recursiveGenerateTopics(projectFolder, model, theTopic, children.nextElement());
        }
      }
      return theTopic;
    }
    @Override
    public boolean doChangeModel(@Nonnull final MindMap model) {
      Topic createdTopic = null;
      if (targetTopics.isEmpty()) {
        createdTopic = recursiveGenerateTopics(projectFolder, model, null, node);
      } else {
        // Remember the first generated topic so it can be focused afterwards.
        boolean first = true;
        for (final Topic t : targetTopics) {
          final Topic generated = recursiveGenerateTopics(projectFolder, model, t, node);
          if (first) {
            createdTopic = generated;
          }
          first = false;
        }
      }
      if (editor.getMindMapPanel().getSelectedTopics().length == 0 && createdTopic != null) {
        final Topic forfocus = createdTopic;
        SwingUtilities.invokeLater(new Runnable() {
          @Override
          public void run() {
            editor.getMindMapPanel().focusTo(forfocus);
          }
        });
      }
      return true;
    }
  });
}
/**
 * Asks the user for a name and creates a new child (folder or file) under the given folder node.
 * For files a template matching the extension is written: "mmd" gets a one-topic mind map,
 * "puml" a PlantUML skeleton, "txt" an empty file. On success the new file is registered in the
 * project tree, opened as a tab and focused.
 *
 * @param folder    the parent folder node, must not be null
 * @param extension the file extension to create ("mmd", "puml" or "txt"), or null to create a folder
 */
private void addChildTo(@Nonnull final NodeFileOrFolder folder, @Nullable final String extension) {
  String fileName = JOptionPane.showInputDialog(Main.getApplicationFrame(), extension == null ? "Folder name" : "File name", extension == null ? "New folder" : "New " + extension.toUpperCase(Locale.ENGLISH) + " file", JOptionPane.QUESTION_MESSAGE);
  if (fileName != null) {
    fileName = fileName.trim();
    if (NodeProjectGroup.FILE_NAME.matcher(fileName).matches()) {
      if (extension != null) {
        // Append the expected extension when the user did not type it (case-insensitive check).
        final String providedExtension = FilenameUtils.getExtension(fileName);
        if (!extension.equalsIgnoreCase(providedExtension)) {
          fileName += '.' + extension;
        }
      }
      final File file = new File(folder.makeFileForNode(), fileName);
      if (file.exists()) {
        DialogProviderManager.getInstance().getDialogProvider().msgError(null, "File '" + fileName + "' already exists!");
        return;
      }
      boolean ok = false;
      if (extension == null) {
        if (!file.mkdirs()) {
          LOGGER.error("Can't create folder"); //NOI18N
          DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create folder '" + fileName + "'!");
        } else {
          ok = true;
        }
      } else {
        switch (extension) {
          case "mmd": { //NOI18N
            // New mind map: a single "Root" topic with jump visualisation switched on.
            final MindMap model = new MindMap(null, true);
            model.setAttribute("showJumps", "true"); //NOI18N
            final Topic root = model.getRoot();
            if (root != null) {
              root.setText("Root"); //NOI18N
            }
            try {
              FileUtils.write(file, model.write(new StringWriter()).toString(), "UTF-8"); //NOI18N
              ok = true;
            } catch (IOException ex) {
              LOGGER.error("Can't create MMD file", ex); //NOI18N
              DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create mind map '" + fileName + "'!");
            }
          }
          break;
          case "puml": { //NOI18N
            // Minimal PlantUML skeleton using the platform line separator.
            final String nextLine = GetUtils.ensureNonNull(System.getProperty("line.separator"), "\n");
            final String text = "@startuml " + nextLine + nextLine + "@enduml";
            try {
              FileUtils.write(file, text, "UTF-8"); //NOI18N
              ok = true;
            } catch (IOException ex) {
              LOGGER.error("Can't create PUML file", ex); //NOI18N
              DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create puml file '" + fileName + "'!");
            }
          }
          break;
          case "txt": { //NOI18N
            try {
              FileUtils.write(file, "", "UTF-8"); //NOI18N
              ok = true;
            } catch (IOException ex) {
              LOGGER.error("Can't create TXT file", ex); //NOI18N
              DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create txt file '" + fileName + "'!");
            }
          }
          break;
          default:
            // Callers only pass the three known extensions; anything else is a programming error.
            throw new Error("Unexpected extension : " + extension); //NOI18N
        }
      }
      if (ok) {
        try {
          getCurrentGroup().addChild(folder, PrefUtils.isShowHiddenFilesAndFolders(), file);
          context.openFileAsTab(file);
          context.focusInTree(file);
        } catch (IOException ex) {
          MainFrame.showExceptionDialog(ex);
        }
      }
    } else {
      DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Illegal file name!");
    }
  }
}
/**
 * Notifies the current project group that the given node has been deleted.
 *
 * @param node the deleted node, must not be null
 * @return the result reported by the project group notification
 */
public boolean deleteNode(@Nonnull final NodeFileOrFolder node) {
  final NodeProjectGroup group = getCurrentGroup();
  return group.fireNotificationThatNodeDeleted(node);
}
@Nonnull
public NodeProjectGroup getCurrentGroup() {
  // The tree model is always installed as a NodeProjectGroup (see setModel / constructor).
  final Object treeModel = this.projectTree.getModel();
  return (NodeProjectGroup) treeModel;
}
/**
 * Installs the given project group as the tree model, optionally expanding the first project.
 *
 * @param model       the new model, must not be null
 * @param expandFirst if true and the model is non-empty, the first child is expanded
 */
public void setModel(@Nonnull final NodeProjectGroup model, final boolean expandFirst) {
  this.projectTree.setModel(Assertions.assertNotNull(model));
  if (!expandFirst || model.getChildCount() == 0) {
    return;
  }
  final TreePath firstChildPath = new TreePath(new Object[]{model, model.getChildAt(0)});
  this.projectTree.expandPath(firstChildPath);
}
}
| mind-map/scia-reto/src/main/java/com/igormaznitsa/sciareto/ui/tree/ExplorerTree.java | /*
* Copyright (C) 2018 Igor Maznitsa.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package com.igormaznitsa.sciareto.ui.tree;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.DropMode;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import com.igormaznitsa.meta.common.utils.GetUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import com.igormaznitsa.meta.annotation.MustNotContainNull;
import com.igormaznitsa.meta.common.utils.Assertions;
import com.igormaznitsa.mindmap.model.ExtraFile;
import com.igormaznitsa.mindmap.model.MMapURI;
import com.igormaznitsa.mindmap.model.MindMap;
import com.igormaznitsa.mindmap.model.Topic;
import com.igormaznitsa.mindmap.model.logger.Logger;
import com.igormaznitsa.mindmap.model.logger.LoggerFactory;
import com.igormaznitsa.mindmap.swing.panel.MindMapPanel;
import com.igormaznitsa.mindmap.swing.panel.utils.Utils;
import com.igormaznitsa.sciareto.Context;
import com.igormaznitsa.sciareto.Main;
import com.igormaznitsa.sciareto.preferences.PrefUtils;
import com.igormaznitsa.sciareto.ui.DialogProviderManager;
import com.igormaznitsa.sciareto.ui.FindFilesForTextPanel;
import com.igormaznitsa.sciareto.ui.FindUsagesPanel;
import com.igormaznitsa.sciareto.ui.MainFrame;
import com.igormaznitsa.sciareto.ui.UiUtils;
import com.igormaznitsa.sciareto.ui.editors.EditorContentType;
import com.igormaznitsa.sciareto.ui.editors.MMDEditor;
import com.igormaznitsa.sciareto.ui.tabs.TabTitle;
import javax.swing.JTree;
public final class ExplorerTree extends JScrollPane {
private static final long serialVersionUID = 3894835807758698784L;
private static final Logger LOGGER = LoggerFactory.getLogger(ExplorerTree.class);
private final DnDTree projectTree;
private final Context context;
public ExplorerTree(@Nonnull final Context context) throws IOException {
super();
this.projectTree = new DnDTree();
this.context = context;
this.projectTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
this.projectTree.setDropMode(DropMode.ON);
this.projectTree.setEditable(true);
ToolTipManager.sharedInstance().registerComponent(this.projectTree);
this.projectTree.setCellRenderer(new TreeCellRenderer());
this.projectTree.setModel(new NodeProjectGroup(context, ".")); //NOI18N
this.projectTree.setRootVisible(false);
this.setViewportView(this.projectTree);
this.projectTree.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(@Nonnull final KeyEvent e) {
if (!e.isConsumed() && e.getModifiers() == 0 && e.getKeyCode() == KeyEvent.VK_ENTER) {
e.consume();
final TreePath selectedPath = projectTree.getSelectionPath();
if (selectedPath != null) {
final NodeFileOrFolder node = (NodeFileOrFolder) selectedPath.getLastPathComponent();
if (node != null) {
if (!node.isLoading() && node.isLeaf()) {
final File file = node.makeFileForNode();
if (file != null && !context.openFileAsTab(file)) {
UiUtils.openInSystemViewer(file);
}
} else {
projectTree.expandPath(selectedPath);
}
}
}
}
}
});
this.projectTree.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(@Nonnull final MouseEvent e) {
if (e.getClickCount() > 1) {
final int selRow = projectTree.getRowForLocation(e.getX(), e.getY());
final TreePath selPath = projectTree.getPathForLocation(e.getX(), e.getY());
if (selRow >= 0) {
final NodeFileOrFolder node = (NodeFileOrFolder) selPath.getLastPathComponent();
if (node != null && !node.isLoading() && node.isLeaf()) {
final File file = node.makeFileForNode();
if (file != null) {
if (context.openFileAsTab(file)) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
context.centerRootTopicIfFocusedMMD();
}
});
} else {
UiUtils.openInSystemViewer(file);
}
}
}
}
}
}
@Override
public void mouseReleased(@Nonnull final MouseEvent e) {
if (e.isPopupTrigger()) {
processPopup(e);
}
}
@Override
public void mousePressed(@Nonnull final MouseEvent e) {
if (e.isPopupTrigger()) {
processPopup(e);
}
}
private void processPopup(@Nonnull final MouseEvent e) {
final TreePath selPath = projectTree.getPathForLocation(e.getX(), e.getY());
if (selPath != null) {
projectTree.setSelectionPath(selPath);
final Object last = selPath.getLastPathComponent();
if (last instanceof NodeFileOrFolder) {
if (((NodeFileOrFolder)last).isLoading()) {
makePopupMenuForLoading((NodeFileOrFolder) last).show(e.getComponent(), e.getX(), e.getY());
} else {
makePopupMenu((NodeFileOrFolder) last).show(e.getComponent(), e.getX(), e.getY());
}
}
}
}
});
}
@Nonnull
public JTree getProjectTree() {
return this.projectTree;
}
public boolean hasSelectedItem() {
return this.projectTree.getSelectionPath() != null;
}
public void showPopUpForSelectedItem() {
final TreePath path = this.projectTree.getSelectionPath();
if (path != null) {
final NodeFileOrFolder component = (NodeFileOrFolder) path.getLastPathComponent();
final Rectangle rect = this.projectTree.getRowBounds(this.projectTree.getRowForPath(path));
final JPopupMenu popupMenu = component.isLoading() ? makePopupMenuForLoading(component) : makePopupMenu(component);
popupMenu.show(this.projectTree, rect.x + rect.width / 2, rect.y + rect.height / 2);
}
}
@Override
public void requestFocus() {
this.projectTree.requestFocus();
}
@Nonnull
@MustNotContainNull
public List<NodeFileOrFolder> findForNamePattern(@Nullable final Pattern namePattern) {
return getCurrentGroup().findForNamePattern(namePattern);
}
@Nonnull
@MustNotContainNull
public List<NodeFileOrFolder> findNodesForFile(@Nonnull final File file) {
return getCurrentGroup().findRelatedNodes(file, new ArrayList<NodeFileOrFolder>());
}
/**
 * Removes the project from the project tree and notifies the application context
 * that the project has been closed.
 *
 * @param tree the project to close, must not be null
 */
public void closeProject(@Nonnull final NodeProject tree) {
  ((NodeProjectGroup) this.projectTree.getModel()).removeProject(tree);
  this.context.onCloseProject(tree);
}

/**
 * @deprecated the method name accidentally starts with a Cyrillic 'с' (U+0441) instead of the
 * ASCII letter; it is kept only so existing callers keep compiling. Use
 * {@link #closeProject(NodeProject)} instead.
 */
@Deprecated
public void сloseProject(@Nonnull final NodeProject tree) {
  closeProject(tree);
}
public void focusToFirstElement() {
this.projectTree.focusToFirstElement();
}
/**
 * Selects and scrolls to the tree item representing the given file, if it is present
 * in the current project group; otherwise does nothing.
 *
 * @param file the file to locate in the tree, must not be null
 */
public void focusToFileItem(@Nonnull final File file) {
  final TreePath path = getCurrentGroup().findPathToFile(file);
  if (path == null) {
    return;
  }
  this.projectTree.setSelectionPath(path);
  this.projectTree.scrollPathToVisible(path);
}
public void unfoldProject(@Nonnull final NodeProject node) {
Utils.safeSwingCall(new Runnable() {
@Override
public void run() {
projectTree.expandPath(new TreePath(new Object[]{getCurrentGroup(), node}));
}
});
}
@Nonnull
private JPopupMenu makePopupMenuForLoading(@Nonnull final NodeFileOrFolder node) {
final JPopupMenu result = new JPopupMenu();
if (node instanceof NodeProject) {
final JMenuItem stopAndClose = new JMenuItem("Cancel and remove");
stopAndClose.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
((NodeProject) node).cancel();
сloseProject((NodeProject) node);
}
});
result.add(stopAndClose);
}
return result;
}
@Nonnull
private JPopupMenu makePopupMenu(@Nonnull final NodeFileOrFolder node) {
final JPopupMenu result = new JPopupMenu();
if (!node.isLeaf()) {
final JMenu makeNew = new JMenu("New...");
JMenuItem item = new JMenuItem("Folder");
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
addChildTo(node, null);
}
});
makeNew.add(item);
item = new JMenuItem("Mind map");
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
addChildTo(node, "mmd"); //NOI18N
}
});
makeNew.add(item);
item = new JMenuItem("Text");
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
addChildTo(node, "txt"); //NOI18N
}
});
makeNew.add(item);
item = new JMenuItem("PlantUML");
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
addChildTo(node, "puml"); //NOI18N
}
});
makeNew.add(item);
result.add(makeNew);
}
if (!node.isProjectKnowledgeFolder()) {
final JMenuItem rename = new JMenuItem("Rename");
rename.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
projectTree.startEditingAtPath(node.makeTreePath());
}
});
result.add(rename);
}
if (node instanceof NodeProject) {
final JMenuItem close = new JMenuItem("Close");
close.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
if (DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(Main.getApplicationFrame(), "Close '" + node + '\'', "Do you really want close '" + node + "\'?\nIt will be just removed from the tree.")) {
сloseProject((NodeProject) node);
}
}
});
result.add(close);
final JMenuItem refresh = new JMenuItem("Reload");
refresh.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
getCurrentGroup().startProjectFolderRefresh((NodeProject) node);
}
});
result.add(refresh);
}
final JMenuItem delete = new JMenuItem("Delete");
delete.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
if (DialogProviderManager.getInstance().getDialogProvider().msgConfirmYesNo(null, "Delete", "Do you really want to delete \"" + node + "\"?")) {
context.deleteTreeNode(node);
}
}
});
result.add(delete);
final JMenuItem openInSystem = new JMenuItem("Open in System");
openInSystem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
final File file = node.makeFileForNode();
if (file != null && file.exists()) {
UiUtils.openInSystemViewer(file);
}
}
});
result.add(openInSystem);
if (node instanceof NodeProject) {
final NodeProject theProject = (NodeProject) node;
if (!theProject.hasKnowledgeFolder()) {
final File knowledgeFolder = new File(theProject.getFolder(), Context.KNOWLEDGE_FOLDER);
final JMenuItem addKnowledgeFolder = new JMenuItem("Create Knowledge folder");
addKnowledgeFolder.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
if (knowledgeFolder.mkdirs()) {
getCurrentGroup().startProjectFolderRefresh(theProject, new Runnable() {
@Override
public void run() {
context.focusInTree(knowledgeFolder);
}
});
} else {
LOGGER.error("Can't create knowledge folder : " + knowledgeFolder); //NOI18N
}
}
});
result.add(addKnowledgeFolder);
}
}
final String BUILD_GRAPH_ITEM = "Build file links graph";
if (node instanceof NodeProject) {
final JMenuItem buildMindMapGraph = new JMenuItem(BUILD_GRAPH_ITEM);
buildMindMapGraph.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
context.showGraphMindMapFileLinksDialog(((NodeProject) node).getFolder(), null, true);
}
});
result.add(buildMindMapGraph);
} else if (node.isLeaf() && node.isMindMapFile()) {
final JMenuItem buildMindMapGraph = new JMenuItem(BUILD_GRAPH_ITEM);
buildMindMapGraph.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
final NodeProject project = node.findProject();
context.showGraphMindMapFileLinksDialog(project == null ? null : project.getFolder(), node.makeFileForNode(), true);
}
});
result.add(buildMindMapGraph);
}
final List<JMenuItem> optional = new ArrayList<>();
final JMenuItem menuSearchUsage = new JMenuItem("Find in maps");
menuSearchUsage.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
if (context.hasUnsavedDocument() && !DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "Detected unsaved documents", "Unsaved content will not be processed!")) {
return;
}
final FindUsagesPanel panel = new FindUsagesPanel(context, node, false);
if (DialogProviderManager.getInstance().getDialogProvider().msgOkCancel(null, "Find usages in all opened projects", panel)) {
final NodeFileOrFolder selected = panel.getSelected();
panel.dispose();
if (selected != null) {
final File file = selected.makeFileForNode();
if (file != null) {
context.focusInTree(file);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
requestFocus();
}
});
}
}
} else {
panel.dispose();
}
}
});
optional.add(menuSearchUsage);
if (!node.isLeaf()) {
final JMenuItem menuSearchFilesForText = new JMenuItem("Find files for text");
menuSearchFilesForText.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
final FindFilesForTextPanel panel = new FindFilesForTextPanel(context, node, JOptionPane.OK_OPTION);
if (DialogProviderManager.getInstance().getDialogProvider().msgOkCancel(null, "Find files for text in '" + node.name + '\'', panel)) {
final NodeFileOrFolder selected = panel.getSelected();
panel.dispose();
if (selected != null) {
final File file = selected.makeFileForNode();
if (file != null) {
context.focusInTree(file);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
requestFocus();
}
});
}
}
} else {
panel.dispose();
}
}
});
optional.add(menuSearchFilesForText);
}
final TabTitle editingTab = this.context.getFocusedTab();
if (editingTab != null && editingTab.getType() == EditorContentType.MINDMAP) {
final JMenuItem addIntoMap = new JMenuItem("Add File as topic");
addIntoMap.addActionListener(new ActionListener() {
@Override
public void actionPerformed(@Nonnull final ActionEvent e) {
addTreeAsTopic(context.findProjectForFile(editingTab.getAssociatedFile()), node, ((MMDEditor) editingTab.getProvider().getEditor()));
}
});
optional.add(addIntoMap);
}
if (!optional.isEmpty()) {
result.add(new JSeparator());
for (final JMenuItem i : optional) {
result.add(i);
}
}
return result;
}
private void addTreeAsTopic(@Nullable final NodeProject project, @Nonnull final NodeFileOrFolder node, @Nonnull final MMDEditor editor) {
final File projectFolder = project == null ? null : project.getFolder();
if (project != null) {
if (node.findProject() != project) {
if (!DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "Different projects", "Opened Map file from another project. File paths will not be relative ones.")) {
return;
}
}
}
final List<Topic> targetTopics = new ArrayList<>(Arrays.asList(editor.getMindMapPanel().getSelectedTopics()));
if (targetTopics.size() > 1) {
if (!DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "Multiple selection detected", "New children will be generated for all focused topics.")) {
return;
}
} else {
if (targetTopics.isEmpty() && editor.getMindMapPanel().getModel().getRoot() != null) {
if (!DialogProviderManager.getInstance().getDialogProvider().msgConfirmOkCancel(null, "No selected parent", "There is not selected topic. The Root will be used as the parent.")) {
return;
}
targetTopics.add(editor.getMindMapPanel().getModel().getRoot());
}
}
editor.getMindMapPanel().executeModelJobs(new MindMapPanel.ModelJob() {
@Nonnull
private Topic recursiveGenerateTopics(@Nullable final File projectFolder, @Nonnull final MindMap model, @Nullable final Topic parent, @Nonnull final NodeFileOrFolder node) {
final ExtraFile fileLink = new ExtraFile(new MMapURI(projectFolder, node.makeFileForNode(), null));
final Topic theTopic;
if (parent == null) {
theTopic = new Topic(model, null, node.toString(), fileLink);
} else {
theTopic = parent.makeChild(node.toString(), null);
theTopic.setExtra(fileLink);
}
if (!node.isLeaf()) {
final Enumeration<NodeFileOrFolder> children = node.children();
while (children.hasMoreElements()) {
recursiveGenerateTopics(projectFolder, model, theTopic, children.nextElement());
}
}
return theTopic;
}
@Override
public boolean doChangeModel(@Nonnull final MindMap model) {
Topic createdTopic = null;
if (targetTopics.isEmpty()) {
createdTopic = recursiveGenerateTopics(projectFolder, model, null, node);
} else {
boolean first = true;
for (final Topic t : targetTopics) {
final Topic generated = recursiveGenerateTopics(projectFolder, model, t, node);
if (first) {
createdTopic = generated;
}
first = false;
}
}
if (editor.getMindMapPanel().getSelectedTopics().length == 0 && createdTopic != null) {
final Topic forfocus = createdTopic;
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
editor.getMindMapPanel().focusTo(forfocus);
}
});
}
return true;
}
});
}
private void addChildTo(@Nonnull final NodeFileOrFolder folder, @Nullable final String extension) {
String fileName = JOptionPane.showInputDialog(Main.getApplicationFrame(), extension == null ? "Folder name" : "File name", extension == null ? "New folder" : "New " + extension.toUpperCase(Locale.ENGLISH) + " file", JOptionPane.QUESTION_MESSAGE);
if (fileName != null) {
fileName = fileName.trim();
if (NodeProjectGroup.FILE_NAME.matcher(fileName).matches()) {
if (extension != null) {
final String providedExtension = FilenameUtils.getExtension(fileName);
if (!extension.equalsIgnoreCase(providedExtension)) {
fileName += '.' + extension;
}
}
final File file = new File(folder.makeFileForNode(), fileName);
if (file.exists()) {
DialogProviderManager.getInstance().getDialogProvider().msgError(null, "File '" + fileName + "' already exists!");
return;
}
boolean ok = false;
if (extension == null) {
if (!file.mkdirs()) {
LOGGER.error("Can't create folder"); //NOI18N
DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create folder '" + fileName + "'!");
} else {
ok = true;
}
} else {
switch (extension) {
case "mmd": { //NOI18N
final MindMap model = new MindMap(null, true);
model.setAttribute("showJumps", "true"); //NOI18N
final Topic root = model.getRoot();
if (root != null) {
root.setText("Root"); //NOI18N
}
try {
FileUtils.write(file, model.write(new StringWriter()).toString(), "UTF-8"); //NOI18N
ok = true;
} catch (IOException ex) {
LOGGER.error("Can't create MMD file", ex); //NOI18N
DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create mind map '" + fileName + "'!");
}
}
break;
case "puml": { //NOI18N
final String nextLine = GetUtils.ensureNonNull(System.getProperty("line.separator"), "\n");
final String text = "@startuml " + nextLine + nextLine + "@enduml";
try {
FileUtils.write(file, text, "UTF-8"); //NOI18N
ok = true;
} catch (IOException ex) {
LOGGER.error("Can't create PUML file", ex); //NOI18N
DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create puml file '" + fileName + "'!");
}
}
break;
case "txt": { //NOI18N
try {
FileUtils.write(file, "", "UTF-8"); //NOI18N
ok = true;
} catch (IOException ex) {
LOGGER.error("Can't create TXT file", ex); //NOI18N
DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Can't create txt file '" + fileName + "'!");
}
}
break;
default:
throw new Error("Unexpected extension : " + extension); //NOI18N
}
}
if (ok) {
try {
getCurrentGroup().addChild(folder, PrefUtils.isShowHiddenFilesAndFolders(), file);
context.openFileAsTab(file);
context.focusInTree(file);
} catch (IOException ex) {
MainFrame.showExceptionDialog(ex);
}
}
} else {
DialogProviderManager.getInstance().getDialogProvider().msgError(null, "Illegal file name!");
}
}
}
public boolean deleteNode(@Nonnull final NodeFileOrFolder node) {
return getCurrentGroup().fireNotificationThatNodeDeleted(node);
}
@Nonnull
public NodeProjectGroup getCurrentGroup() {
return (NodeProjectGroup) this.projectTree.getModel();
}
public void setModel(@Nonnull final NodeProjectGroup model, final boolean expandFirst) {
this.projectTree.setModel(Assertions.assertNotNull(model));
if (expandFirst && model.getChildCount() > 0) {
this.projectTree.expandPath(new TreePath(new Object[]{model, model.getChildAt(0)}));
}
}
}
| added icon into cancel item | mind-map/scia-reto/src/main/java/com/igormaznitsa/sciareto/ui/tree/ExplorerTree.java | added icon into cancel item |
|
Java | apache-2.0 | 3bc6e30dc8b9af8d172801950ab3117bc355764b | 0 | droolsjbpm/drools-wb,jomarko/drools-wb,porcelli-forks/drools-wb,droolsjbpm/drools-wb,porcelli-forks/drools-wb,karreiro/drools-wb,droolsjbpm/drools-wb,karreiro/drools-wb,jomarko/drools-wb,Rikkola/drools-wb,droolsjbpm/drools-wb,porcelli-forks/drools-wb,jomarko/drools-wb,karreiro/drools-wb,Rikkola/drools-wb,Rikkola/drools-wb | /*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.dtablexls.backend.server;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.commons.io.IOUtils;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.drools.decisiontable.InputType;
import org.drools.decisiontable.SpreadsheetCompiler;
import org.drools.template.parser.DecisionTableParseException;
import org.drools.workbench.models.guided.dtable.shared.conversion.ConversionResult;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSContent;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSConversionService;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSService;
import org.guvnor.common.services.backend.config.SafeSessionInfo;
import org.guvnor.common.services.backend.exceptions.ExceptionUtilities;
import org.guvnor.common.services.backend.file.JavaFileFilter;
import org.guvnor.common.services.backend.util.CommentedOptionFactory;
import org.guvnor.common.services.backend.validation.GenericValidator;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.jboss.errai.bus.server.annotations.Service;
import org.jboss.errai.security.shared.service.AuthenticationService;
import org.kie.workbench.common.services.backend.file.DRLFileFilter;
import org.kie.workbench.common.services.backend.service.KieService;
import org.kie.workbench.common.services.shared.source.SourceGenerationFailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.backend.server.util.Paths;
import org.uberfire.backend.vfs.Path;
import org.uberfire.ext.editor.commons.service.CopyService;
import org.uberfire.ext.editor.commons.service.DeleteService;
import org.uberfire.ext.editor.commons.service.RenameService;
import org.uberfire.io.IOService;
import org.uberfire.java.nio.file.StandardOpenOption;
import org.uberfire.rpc.SessionInfo;
import org.uberfire.rpc.impl.SessionInfoImpl;
import org.uberfire.workbench.events.ResourceOpenedEvent;
@Service
@ApplicationScoped
// Implementation needs to implement both interfaces even though one extends the other
// otherwise the implementation discovery mechanism for the @Service annotation fails.
public class DecisionTableXLSServiceImpl
extends KieService<DecisionTableXLSContent>
implements DecisionTableXLSService,
ExtendedDecisionTableXLSService {
private static final Logger log = LoggerFactory.getLogger( DecisionTableXLSServiceImpl.class );
private static final JavaFileFilter FILTER_JAVA = new JavaFileFilter();
private static final DRLFileFilter FILTER_DRL = new DRLFileFilter();
private IOService ioService;
private CopyService copyService;
private DeleteService deleteService;
private RenameService renameService;
private Event<ResourceOpenedEvent> resourceOpenedEvent;
private DecisionTableXLSConversionService conversionService;
private GenericValidator genericValidator;
private CommentedOptionFactory commentedOptionFactory;
private AuthenticationService authenticationService;
/**
 * No-op constructor. NOTE(review): presumably required by the CDI container for
 * proxying the @ApplicationScoped bean — confirm before removing.
 */
public DecisionTableXLSServiceImpl() {
}
/**
 * Container-managed constructor; all collaborating services are injected.
 * The "ioStrategy" qualified IOService is used for all virtual-file-system access.
 */
@Inject
public DecisionTableXLSServiceImpl( @Named("ioStrategy") final IOService ioService,
                                    final CopyService copyService,
                                    final DeleteService deleteService,
                                    final RenameService renameService,
                                    final Event<ResourceOpenedEvent> resourceOpenedEvent,
                                    final DecisionTableXLSConversionService conversionService,
                                    final GenericValidator genericValidator,
                                    final CommentedOptionFactory commentedOptionFactory,
                                    final AuthenticationService authenticationService ) {
  this.ioService = ioService;
  this.copyService = copyService;
  this.deleteService = deleteService;
  this.renameService = renameService;
  this.resourceOpenedEvent = resourceOpenedEvent;
  this.conversionService = conversionService;
  this.genericValidator = genericValidator;
  this.commentedOptionFactory = commentedOptionFactory;
  this.authenticationService = authenticationService;
}
/**
 * {@inheritDoc}
 *
 * NOTE(review): delegates straight to the base class; presumably re-declared so the method is
 * exposed on this @Service endpoint (see the class-level comment about implementing both
 * interfaces for service discovery) — confirm.
 */
@Override
public DecisionTableXLSContent loadContent( final Path path ) {
  return super.loadContent( path );
}
/**
 * Builds the content DTO for the editor screen; only the overview is populated here,
 * the raw XLS bytes are streamed separately via {@code load}.
 */
@Override
protected DecisionTableXLSContent constructContent( Path path,
                                                    Overview overview ) {
  final DecisionTableXLSContent result = new DecisionTableXLSContent();
  result.setOverview( overview );
  return result;
}
/**
 * Opens the XLS decision table as a raw input stream and fires a {@link ResourceOpenedEvent}
 * so interested parties know the asset has been opened.
 *
 * @param path      path of the asset to read
 * @param sessionId id of the user session requesting the content
 * @return the open stream; the caller is responsible for closing it
 */
@Override
public InputStream load( final Path path,
                         final String sessionId ) {
  try {
    final InputStream inputStream = ioService.newInputStream( Paths.convert( path ),
                                                              StandardOpenOption.READ );
    //Signal opening to interested parties
    resourceOpenedEvent.fire( new ResourceOpenedEvent( path,
                                                       getSessionInfo( sessionId ) ) );
    return inputStream;
  } catch ( Exception e ) {
    throw ExceptionUtilities.handleException( e );
  }
}
/**
 * Creates a new XLS decision table asset from the uploaded stream. The upload is buffered in a
 * temporary file so it can be validated as a readable workbook before anything is written to
 * the virtual file system.
 *
 * Fixes over the previous version: the FileInputStream used to copy the temp file into the
 * repository is now closed (it leaked before), the temporary file is deleted afterwards, and
 * close failures no longer throw from {@code finally} (which could mask the real exception).
 *
 * @param resource  target path of the new asset
 * @param content   uploaded XLS content; always closed by this method
 * @param sessionId id of the user session performing the creation
 * @param comment   commit comment
 * @return the path of the created asset with attributes populated
 */
@Override
public Path create( final Path resource,
                    final InputStream content,
                    final String sessionId,
                    final String comment ) {
    final SessionInfo sessionInfo = getSessionInfo( sessionId );
    log.info( "USER:" + sessionInfo.getIdentity().getIdentifier() + " CREATING asset [" + resource.getFileName() + "]" );
    File tempFile = null;
    try {
        //Buffer the upload in a temporary file so it can be validated before being stored
        tempFile = File.createTempFile( "testxls", null );
        try ( final FileOutputStream tempFOS = new FileOutputStream( tempFile ) ) {
            IOUtils.copy( content, tempFOS );
            tempFOS.flush();
        }
        //Validate the xls
        validate( tempFile );
        final org.uberfire.java.nio.file.Path nioPath = Paths.convert( resource );
        ioService.createFile( nioPath );
        try ( final InputStream tempInput = new FileInputStream( tempFile );
              final OutputStream outputStream = ioService.newOutputStream( nioPath,
                                                                           commentedOptionFactory.makeCommentedOption( comment,
                                                                                                                       sessionInfo.getIdentity(),
                                                                                                                       sessionInfo ) ) ) {
            IOUtils.copy( tempInput,
                          outputStream );
            outputStream.flush();
        }
        //Read Path to ensure attributes have been set
        return Paths.convert( nioPath );
    } catch ( Exception e ) {
        throw ExceptionUtilities.handleException( e );
    } finally {
        //Close quietly so a close failure cannot mask an exception already in flight
        IOUtils.closeQuietly( content );
        if ( tempFile != null && !tempFile.delete() ) {
            tempFile.deleteOnExit();
        }
    }
}
void validate( final File tempFile ) {
try {
WorkbookFactory.create( new FileInputStream( tempFile ) );
} catch ( InvalidFormatException e ) {
throw new DecisionTableParseException( "DecisionTableParseException: An error occurred opening the workbook. It is possible that the encoding of the document did not match the encoding of the reader.",
e );
} catch ( IOException e ) {
throw new DecisionTableParseException( "DecisionTableParseException: Failed to open Excel stream, " + "please check that the content is xls97 format.",
e );
} catch ( Throwable e ) {
throw new DecisionTableParseException( "DecisionTableParseException: " + e.getMessage(),
e );
}
}
@Override
public Path save( final Path resource,
final InputStream content,
final String sessionId,
final String comment ) {
final SessionInfo sessionInfo = getSessionInfo( sessionId );
log.info( "USER:" + sessionInfo.getIdentity().getIdentifier() + " UPDATING asset [" + resource.getFileName() + "]" );
OutputStream outputStream = null;
try {
final org.uberfire.java.nio.file.Path nioPath = Paths.convert( resource );
outputStream = ioService.newOutputStream( nioPath,
commentedOptionFactory.makeCommentedOption( comment,
sessionInfo.getIdentity(),
sessionInfo ) );
IOUtils.copy( content,
outputStream );
outputStream.flush();
//Read Path to ensure attributes have been set
final Path newPath = Paths.convert( nioPath );
return newPath;
} catch ( Exception e ) {
throw ExceptionUtilities.handleException( e );
} finally {
try {
content.close();
} catch ( IOException e ) {
throw ExceptionUtilities.handleException( e );
}
if (outputStream != null) {
try {
outputStream.close();
} catch ( IOException e ) {
throw ExceptionUtilities.handleException( e );
}
}
}
}
@Override
public String getSource( final Path path ) {
InputStream inputStream = null;
try {
final SpreadsheetCompiler compiler = new SpreadsheetCompiler();
inputStream = ioService.newInputStream( Paths.convert( path ),
StandardOpenOption.READ );
final String drl = compiler.compile( inputStream,
InputType.XLS );
return drl;
} catch ( Exception e ) {
throw new SourceGenerationFailedException( e.getMessage() );
} finally {
if ( inputStream != null ) {
try {
inputStream.close();
} catch ( IOException ioe ) {
throw ExceptionUtilities.handleException( ioe );
}
}
}
}
@Override
public void delete( final Path path,
final String comment ) {
try {
deleteService.delete( path,
comment );
} catch ( Exception e ) {
throw ExceptionUtilities.handleException( e );
}
}
@Override
public Path rename( final Path path,
final String newName,
final String comment ) {
try {
return renameService.rename( path,
newName,
comment );
} catch ( Exception e ) {
throw ExceptionUtilities.handleException( e );
}
}
@Override
public Path copy( final Path path,
final String newName,
final String comment ) {
try {
return copyService.copy( path,
newName,
comment );
} catch ( Exception e ) {
throw ExceptionUtilities.handleException( e );
}
}
@Override
public ConversionResult convert( final Path path ) {
try {
return conversionService.convert( path );
} catch ( Exception e ) {
throw ExceptionUtilities.handleException( e );
}
}
@Override
public List<ValidationMessage> validate( final Path path,
final Path resource ) {
InputStream inputStream = null;
try {
inputStream = ioService.newInputStream( Paths.convert( path ),
StandardOpenOption.READ );
return genericValidator.validate( path,
inputStream,
FILTER_DRL,
FILTER_JAVA );
} catch ( Exception e ) {
throw ExceptionUtilities.handleException( e );
} finally {
if ( inputStream != null ) {
try {
inputStream.close();
} catch ( IOException ioe ) {
throw ExceptionUtilities.handleException( ioe );
}
}
}
}
private SessionInfo getSessionInfo( final String sessionId ) {
return new SafeSessionInfo( new SessionInfoImpl( sessionId,
authenticationService.getUser() ) );
}
}
| drools-wb-screens/drools-wb-dtable-xls-editor/drools-wb-dtable-xls-editor-backend/src/main/java/org/drools/workbench/screens/dtablexls/backend/server/DecisionTableXLSServiceImpl.java | /*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.dtablexls.backend.server;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.commons.io.IOUtils;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.drools.decisiontable.InputType;
import org.drools.decisiontable.SpreadsheetCompiler;
import org.drools.template.parser.DecisionTableParseException;
import org.drools.workbench.models.guided.dtable.shared.conversion.ConversionResult;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSContent;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSConversionService;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSService;
import org.guvnor.common.services.backend.config.SafeSessionInfo;
import org.guvnor.common.services.backend.exceptions.ExceptionUtilities;
import org.guvnor.common.services.backend.file.JavaFileFilter;
import org.guvnor.common.services.backend.util.CommentedOptionFactory;
import org.guvnor.common.services.backend.validation.GenericValidator;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.jboss.errai.bus.server.annotations.Service;
import org.jboss.errai.security.shared.service.AuthenticationService;
import org.kie.workbench.common.services.backend.file.DRLFileFilter;
import org.kie.workbench.common.services.backend.service.KieService;
import org.kie.workbench.common.services.shared.source.SourceGenerationFailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.backend.server.util.Paths;
import org.uberfire.backend.vfs.Path;
import org.uberfire.ext.editor.commons.service.CopyService;
import org.uberfire.ext.editor.commons.service.DeleteService;
import org.uberfire.ext.editor.commons.service.RenameService;
import org.uberfire.io.IOService;
import org.uberfire.java.nio.file.StandardOpenOption;
import org.uberfire.rpc.SessionInfo;
import org.uberfire.rpc.impl.SessionInfoImpl;
import org.uberfire.workbench.events.ResourceOpenedEvent;
@Service
@ApplicationScoped
// Implementation needs to implement both interfaces even though one extends the other
// otherwise the implementation discovery mechanism for the @Service annotation fails.
public class DecisionTableXLSServiceImpl
extends KieService<DecisionTableXLSContent>
implements DecisionTableXLSService,
ExtendedDecisionTableXLSService {
private static final Logger log = LoggerFactory.getLogger( DecisionTableXLSServiceImpl.class );
private static final JavaFileFilter FILTER_JAVA = new JavaFileFilter();
private static final DRLFileFilter FILTER_DRL = new DRLFileFilter();
private IOService ioService;
private CopyService copyService;
private DeleteService deleteService;
private RenameService renameService;
private Event<ResourceOpenedEvent> resourceOpenedEvent;
private DecisionTableXLSConversionService conversionService;
private GenericValidator genericValidator;
private CommentedOptionFactory commentedOptionFactory;
private AuthenticationService authenticationService;
public DecisionTableXLSServiceImpl() {
}
@Inject
public DecisionTableXLSServiceImpl( @Named("ioStrategy") final IOService ioService,
final CopyService copyService,
final DeleteService deleteService,
final RenameService renameService,
final Event<ResourceOpenedEvent> resourceOpenedEvent,
final DecisionTableXLSConversionService conversionService,
final GenericValidator genericValidator,
final CommentedOptionFactory commentedOptionFactory,
final AuthenticationService authenticationService ) {
this.ioService = ioService;
this.copyService = copyService;
this.deleteService = deleteService;
this.renameService = renameService;
this.resourceOpenedEvent = resourceOpenedEvent;
this.conversionService = conversionService;
this.genericValidator = genericValidator;
this.commentedOptionFactory = commentedOptionFactory;
this.authenticationService = authenticationService;
}
@Override
public DecisionTableXLSContent loadContent( final Path path ) {
return super.loadContent( path );
}
@Override
protected DecisionTableXLSContent constructContent( Path path,
Overview overview ) {
final DecisionTableXLSContent content = new DecisionTableXLSContent();
content.setOverview( overview );
return content;
}
@Override
public InputStream load( final Path path,
final String sessionId ) {
try {
final InputStream inputStream = ioService.newInputStream( Paths.convert( path ),
StandardOpenOption.READ );
//Signal opening to interested parties
resourceOpenedEvent.fire( new ResourceOpenedEvent( path,
getSessionInfo( sessionId ) ) );
return inputStream;
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
}
}
@Override
public Path create( final Path resource,
final InputStream content,
final String sessionId,
final String comment ) {
final SessionInfo sessionInfo = getSessionInfo( sessionId );
log.info( "USER:" + sessionInfo.getIdentity().getIdentifier() + " CREATING asset [" + resource.getFileName() + "]" );
try {
File tempFile = File.createTempFile( "testxls", null );
FileOutputStream tempFOS = new FileOutputStream( tempFile );
IOUtils.copy( content, tempFOS );
tempFOS.flush();
tempFOS.close();
//Validate the xls
validate( tempFile );
final org.uberfire.java.nio.file.Path nioPath = Paths.convert( resource );
ioService.createFile( nioPath );
final OutputStream outputStream = ioService.newOutputStream( nioPath,
commentedOptionFactory.makeCommentedOption( comment,
sessionInfo.getIdentity(),
sessionInfo ) );
IOUtils.copy( new FileInputStream( tempFile ),
outputStream );
outputStream.flush();
outputStream.close();
//Read Path to ensure attributes have been set
final Path newPath = Paths.convert( nioPath );
return newPath;
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
e.printStackTrace();
throw ExceptionUtilities.handleException( e );
} finally {
try {
content.close();
} catch ( IOException e ) {
throw ExceptionUtilities.handleException( e );
}
}
}
void validate( final File tempFile ) {
try {
WorkbookFactory.create( new FileInputStream( tempFile ) );
} catch ( InvalidFormatException e ) {
throw new DecisionTableParseException( "DecisionTableParseException: An error occurred opening the workbook. It is possible that the encoding of the document did not match the encoding of the reader.",
e );
} catch ( IOException e ) {
throw new DecisionTableParseException( "DecisionTableParseException: Failed to open Excel stream, " + "please check that the content is xls97 format.",
e );
} catch ( Throwable e ) {
throw new DecisionTableParseException( "DecisionTableParseException: " + e.getMessage(),
e );
}
}
@Override
public Path save( final Path resource,
final InputStream content,
final String sessionId,
final String comment ) {
final SessionInfo sessionInfo = getSessionInfo( sessionId );
log.info( "USER:" + sessionInfo.getIdentity().getIdentifier() + " UPDATING asset [" + resource.getFileName() + "]" );
try {
final org.uberfire.java.nio.file.Path nioPath = Paths.convert( resource );
final OutputStream outputStream = ioService.newOutputStream( nioPath,
commentedOptionFactory.makeCommentedOption( comment,
sessionInfo.getIdentity(),
sessionInfo ) );
IOUtils.copy( content,
outputStream );
outputStream.flush();
outputStream.close();
//Read Path to ensure attributes have been set
final Path newPath = Paths.convert( nioPath );
return newPath;
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
} finally {
try {
content.close();
} catch ( IOException e ) {
throw ExceptionUtilities.handleException( e );
}
}
}
@Override
public String getSource( final Path path ) {
InputStream inputStream = null;
try {
final SpreadsheetCompiler compiler = new SpreadsheetCompiler();
inputStream = ioService.newInputStream( Paths.convert( path ),
StandardOpenOption.READ );
final String drl = compiler.compile( inputStream,
InputType.XLS );
return drl;
} catch ( Exception e ) {
throw new SourceGenerationFailedException( e.getMessage() );
} finally {
if ( inputStream != null ) {
try {
inputStream.close();
} catch ( IOException ioe ) {
//Swallow
}
}
}
}
@Override
public void delete( final Path path,
final String comment ) {
try {
deleteService.delete( path,
comment );
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
}
}
@Override
public Path rename( final Path path,
final String newName,
final String comment ) {
try {
return renameService.rename( path,
newName,
comment );
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
}
}
@Override
public Path copy( final Path path,
final String newName,
final String comment ) {
try {
return copyService.copy( path,
newName,
comment );
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
}
}
@Override
public ConversionResult convert( final Path path ) {
try {
return conversionService.convert( path );
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
}
}
@Override
public List<ValidationMessage> validate( final Path path,
final Path resource ) {
try {
final InputStream inputStream = ioService.newInputStream( Paths.convert( path ),
StandardOpenOption.READ );
return genericValidator.validate( path,
inputStream,
FILTER_DRL,
FILTER_JAVA );
} catch ( Exception e ) {
log.error( e.getMessage(),
e );
throw ExceptionUtilities.handleException( e );
}
}
private SessionInfo getSessionInfo( final String sessionId ) {
return new SafeSessionInfo( new SessionInfoImpl( sessionId,
authenticationService.getUser() ) );
}
}
| clean-up exception handling in DecisionTableXLSServiceImpl (#163)
- removed some duplicate logging (ExceptionUtilities.handleException(..) already logs the exception)
- added some stream closing
- added some more exception "handling" - let's be consistent when closing these streams | drools-wb-screens/drools-wb-dtable-xls-editor/drools-wb-dtable-xls-editor-backend/src/main/java/org/drools/workbench/screens/dtablexls/backend/server/DecisionTableXLSServiceImpl.java | clean-up exception handling in DecisionTableXLSServiceImpl (#163) |
|
Java | apache-2.0 | d978bd2b4b86cf09c5b0b4facdbd5252e664cad5 | 0 | cloudsmith/orientdb,cloudsmith/orientdb,cloudsmith/orientdb,cloudsmith/orientdb | /*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.config;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import com.orientechnologies.common.log.OLogManager;
/**
* Keeps all configuration settings. At startup assigns the configuration values by reading system properties.
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*
*/
public enum OGlobalConfiguration {
// LOG
LOG_CONSOLE_LEVEL("log.console.level", "Console's logging level", String.class, "info", new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OLogManager.instance().setLevel((String) iNewValue, ConsoleHandler.class);
}
}), LOG_FILE_LEVEL("log.file.level", "File's logging level", String.class, "fine", new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OLogManager.instance().setLevel((String) iNewValue, FileHandler.class);
}
}),
// STORAGE
STORAGE_KEEP_OPEN(
"storage.keepOpen",
"Tells to the engine to not close the storage when a database is closed. Storages will be closed when the process will shutdown",
Boolean.class, Boolean.FALSE),
STORAGE_CACHE_SIZE("storage.cache.size", "Size of the cache that keep the record in memory", Integer.class, 5000),
// DATABASE
DB_USE_CACHE("db.cache.enabled", "Uses the storage cache", Boolean.class, true),
OBJECT_SAVE_ONLY_DIRTY("object.saveOnlyDirty", "Object Database saves only object bound to dirty records", Boolean.class, false),
// TREEMAP
MVRBTREE_LAZY_UPDATES("mvrbtree.lazyUpdates", "Configure the TreeMaps (indexes and dictionaries) as buffered or not",
Integer.class, 300),
MVRBTREE_NODE_PAGE_SIZE("mvrbtree.nodePageSize",
"Page size of each single node. 1,024 means that 1,024 entries can be stored inside a node", Float.class, 1024),
MVRBTREE_LOAD_FACTOR("mvrbtree.loadFactor", "HashMap load factor", Float.class, 0.7f),
MVRBTREE_OPTIMIZE_THRESHOLD("mvrbtree.optimizeThreshold", "Auto optimize the TreeMap every X operations as get, put and remove",
Integer.class, 50000),
MVRBTREE_ENTRYPOINTS("mvrbtree.entryPoints", "Number of entry points to start searching entries", Integer.class, 5),
MVRBTREE_OPTIMIZE_ENTRYPOINTS_FACTOR("mvrbtree.optimizeEntryPointsFactor",
"Multiplicand factor to apply to entry-points list (parameter mvrbtree.entrypoints) to determine if needs of optimization",
Float.class, 1.0f),
// FILE
FILE_MMAP_BLOCK_SIZE("file.mmap.blockSize", "Size of the memory mapped block", Integer.class, 300000),
FILE_MMAP_MAX_MEMORY("file.mmap.maxMemory",
"Max memory allocable by memory mapping manager. Note that on 32bit OS the limit is to 2Gb but can change to OS by OS",
Integer.class, 110000000),
FILE_MMAP_FORCE_DELAY("file.mmap.forceDelay",
"Delay time in ms to wait for another force flush of the memory mapped block to the disk", Integer.class, 500),
FILE_MMAP_FORCE_RETRY("file.mmap.forceRetry", "Number of times the memory mapped block will try to flush to the disk",
Integer.class, 10),
// NETWORK
NETWORK_SOCKET_BUFFER_SIZE("network.socketBufferSize", "TCP/IP Socket buffer size", Integer.class, 32768),
NETWORK_SOCKET_TIMEOUT("network.timeout", "TCP/IP Socket timeout in ms", Integer.class, 10000),
NETWORK_SOCKET_RETRY("network.retry",
"Number of times the client connection retries to connect to the server in case of failure", Integer.class, 5),
NETWORK_SOCKET_RETRY_DELAY("network.retryDelay", "Number of ms the client wait to reconnect to the server in case of failure",
Integer.class, 500),
NETWORK_BINARY_MAX_CONTENT_LENGTH("network.binary.maxLength", "TCP/IP max content length in bytes of BINARY requests",
Integer.class, 100000),
NETWORK_BINARY_DEBUG("network.binary.debug", "Debug mode: print all the incoming data on binary channel", Boolean.class, false),
NETWORK_HTTP_MAX_CONTENT_LENGTH("network.http.maxLength", "TCP/IP max content length in bytes of HTTP requests", Integer.class,
100000),
// PROFILER
PROFILER_ENABLED("profiler.enabled", "Enable the recording of statistics and counters", Boolean.class, false),
// SERVER
SERVER_CACHE_STATIC_RESOURCES("server.cache.staticResources", "Cache static resources after loaded", Boolean.class, false),
// DISTRIBUTED SERVERS
DISTRIBUTED_ASYNC_TIME_DELAY("distributed.async.timeDelay",
"Delay time (in ms) of synchronization with slave nodes. 0 means early synchronization", Integer.class, 0),
DISTRIBUTED_SYNC_MAXRECORDS_BUFFER("distributed.sync.maxRecordsBuffer",
"Maximum number of records to buffer before to send to the slave nodes", Integer.class, 100);
private final String key;
private final Object defValue;
private final Class<?> type;
private Object value = null;
private String description;
private OConfigurationChangeCallback changeCallback = null;
// AT STARTUP AUTO-CONFIG
static {
readConfiguration();
}
OGlobalConfiguration(final String iKey, final String iDescription, final Class<?> iType, final Object iDefValue,
final OConfigurationChangeCallback iChangeAction) {
this(iKey, iDescription, iType, iDefValue);
changeCallback = iChangeAction;
}
OGlobalConfiguration(final String iKey, final String iDescription, final Class<?> iType, final Object iDefValue) {
key = iKey;
description = iDescription;
defValue = iDefValue;
type = iType;
}
public void setValue(final Object iValue) {
Object oldValue = value;
if (iValue != null)
if (type == Boolean.class)
value = Boolean.parseBoolean(iValue.toString());
else if (type == Integer.class)
value = Integer.parseInt(iValue.toString());
else if (type == Float.class)
value = Float.parseFloat(iValue.toString());
else if (type == String.class)
value = iValue.toString();
else
value = iValue;
if (changeCallback != null)
changeCallback.change(oldValue, value);
}
public Object getValue() {
return value != null ? value : defValue;
}
public boolean getValueAsBoolean() {
final Object v = value != null ? value : defValue;
return v instanceof Boolean ? ((Boolean) v).booleanValue() : Boolean.parseBoolean(v.toString());
}
public String getValueAsString() {
final Object v = value != null ? value : defValue;
return v.toString();
}
public int getValueAsInteger() {
final Object v = value != null ? value : defValue;
return v instanceof Integer ? ((Integer) v).intValue() : Integer.parseInt(v.toString());
}
public float getValueAsFloat() {
final Object v = value != null ? value : defValue;
return v instanceof Float ? ((Float) v).floatValue() : Float.parseFloat(v.toString());
}
public String getKey() {
return key;
}
public Class<?> getType() {
return type;
}
public String getDescription() {
return description;
}
/**
* Find the OGlobalConfiguration instance by the key. Key is case insensitive.
*
* @param iKey
* Key to find. It's case insensitive.
* @return OGlobalConfiguration instance if found, otherwise null
*/
public static OGlobalConfiguration findByKey(final String iKey) {
for (OGlobalConfiguration v : values()) {
if (v.getKey().equalsIgnoreCase(iKey))
return v;
}
return null;
}
/**
* Assign configuration values by reading system properties.
*/
private static void readConfiguration() {
String prop;
for (OGlobalConfiguration config : values()) {
prop = System.getProperty(config.key);
if (prop != null)
config.setValue(prop);
}
}
/**
* Change configuration values in one shot by passing a Map of values.
*/
public static void setConfiguration(final Map<String, Object> iConfig) {
OGlobalConfiguration cfg;
for (Entry<String, Object> config : iConfig.entrySet()) {
cfg = valueOf(config.getKey());
if (cfg != null)
cfg.setValue(config.getValue());
}
}
}
| core/src/main/java/com/orientechnologies/orient/core/config/OGlobalConfiguration.java | /*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.config;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import com.orientechnologies.common.log.OLogManager;
/**
* Keeps all configuration settings. At startup assigns the configuration values by reading system properties.
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*
*/
public enum OGlobalConfiguration {
// LOG
LOG_CONSOLE_LEVEL("log.console.level", "Console's logging level", String.class, "info", new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OLogManager.instance().setLevel((String) iNewValue, ConsoleHandler.class);
}
}), LOG_FILE_LEVEL("log.file.level", "File's logging level", String.class, "fine", new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OLogManager.instance().setLevel((String) iNewValue, FileHandler.class);
}
}),
// STORAGE
STORAGE_KEEP_OPEN(
"storage.keepOpen",
"Tells to the engine to not close the storage when a database is closed. Storages will be closed when the process will shutdown",
Boolean.class, Boolean.FALSE),
STORAGE_CACHE_SIZE("storage.cache.size", "Size of the cache that keep the record in memory", Integer.class, 5000),
// DATABASE
DB_USE_CACHE("db.cache.enabled", "Uses the storage cache", Boolean.class, true),
OBJECT_SAVE_ONLY_DIRTY("object.saveOnlyDirty", "Object Database saves only object bound to dirty records", Boolean.class, false),
// TREEMAP
MVRBTREE_LAZY_UPDATES("mvrbtree.lazyUpdates", "Configure the TreeMaps (indexes and dictionaries) as buffered or not",
Integer.class, 300),
MVRBTREE_NODE_PAGE_SIZE("mvrbtree.nodePageSize",
"Page size of each single node. 1,024 means that 1,024 entries can be stored inside a node", Float.class, 1024),
MVRBTREE_LOAD_FACTOR("mvrbtree.loadFactor", "HashMap load factor", Float.class, 0.7f),
MVRBTREE_OPTIMIZE_THRESHOLD("mvrbtree.optimizeThreshold", "Auto optimize the TreeMap every X operations as get, put and remove",
Integer.class, 50000),
MVRBTREE_ENTRYPOINTS("mvrbtree.entryPoints", "Number of entry points to start searching entries", Integer.class, 5),
MVRBTREE_OPTIMIZE_ENTRYPOINTS_FACTOR("mvrbtree.optimizeEntryPointsFactor",
"Multiplicand factor to apply to entry-points list (parameter mvrbtree.entrypoints) to determine if needs of optimization",
Float.class, 1.0f),
// FILE
FILE_MMAP_BLOCK_SIZE("file.mmap.blockSize", "Size of the memory mapped block", Integer.class, 300000),
FILE_MMAP_MAX_MEMORY("file.mmap.maxMemory",
"Max memory allocable by memory mapping manager. Note that on 32bit OS the limit is to 2Gb but can change to OS by OS",
Integer.class, 110000000),
FILE_MMAP_FORCE_DELAY("file.mmap.forceDelay",
"Delay time in ms to wait for another force flush of the memory mapped block to the disk", Integer.class, 500),
FILE_MMAP_FORCE_RETRY("file.mmap.forceRetry", "Number of times the memory mapped block will try to flush to the disk",
Integer.class, 10),
// NETWORK
NETWORK_SOCKET_BUFFER_SIZE("network.socketBufferSize", "TCP/IP Socket buffer size", Integer.class, 32768),
NETWORK_SOCKET_TIMEOUT("network.timeout", "TCP/IP Socket timeout in ms", Integer.class, 10000),
NETWORK_SOCKET_RETRY("network.retry",
"Number of times the client connection retries to connect to the server in case of failure", Integer.class, 5),
NETWORK_SOCKET_RETRY_DELAY("network.retryDelay", "Number of ms the client wait to reconnect to the server in case of failure",
Integer.class, 500),
NETWORK_BINARY_MAX_CONTENT_LENGTH("network.binary.maxLength", "TCP/IP max content length in bytes of BINARY requests",
Integer.class, 100000),
NETWORK_BINARY_DEBUG("network.binary.debug", "Debug mode: print all the incoming data on binary channel", Boolean.class, false),
NETWORK_HTTP_MAX_CONTENT_LENGTH("network.http.maxLength", "TCP/IP max content length in bytes of HTTP requests", Integer.class,
100000),
// PROFILER
PROFILER_ENABLED("profiler.enabled", "Enable the recording of statistics and counters", Boolean.class, false),
// SERVER
SERVER_CACHE_STATIC_RESOURCES("server.cache.staticResources", "Cache static resources after loaded", Boolean.class, false),
// DISTRIBUTED SERVERS
DISTRIBUTED_SERVER_SYNC_REPLICAS("distributed.server.sync.replicas",
"Number of synchronous replicas, use 1 to have at least a secure backup server in case of failure", Integer.class, 1),
DISTRIBUTED_SERVER_SYNC_TIME_DELAY("distributed.server.sync.timeDelay", "Delay time (in ms) of synchronization with slave nodes",
Integer.class, 30000),
DISTRIBUTED_SERVER_SYNC_MAXRECORDS_BUFFER("distributed.server.sync.maxRecordsBuffer",
"Maximum number of records to buffer before to send to the slave nodes", Integer.class, 100);
private final String key;
private final Object defValue;
private final Class<?> type;
private Object value = null;
private String description;
private OConfigurationChangeCallback changeCallback = null;
// AT STARTUP AUTO-CONFIG
static {
readConfiguration();
}
OGlobalConfiguration(final String iKey, final String iDescription, final Class<?> iType, final Object iDefValue,
final OConfigurationChangeCallback iChangeAction) {
this(iKey, iDescription, iType, iDefValue);
changeCallback = iChangeAction;
}
OGlobalConfiguration(final String iKey, final String iDescription, final Class<?> iType, final Object iDefValue) {
key = iKey;
description = iDescription;
defValue = iDefValue;
type = iType;
}
public void setValue(final Object iValue) {
Object oldValue = value;
if (iValue != null)
if (type == Boolean.class)
value = Boolean.parseBoolean(iValue.toString());
else if (type == Integer.class)
value = Integer.parseInt(iValue.toString());
else if (type == Float.class)
value = Float.parseFloat(iValue.toString());
else if (type == String.class)
value = iValue.toString();
else
value = iValue;
if (changeCallback != null)
changeCallback.change(oldValue, value);
}
public Object getValue() {
return value != null ? value : defValue;
}
public boolean getValueAsBoolean() {
final Object v = value != null ? value : defValue;
return v instanceof Boolean ? ((Boolean) v).booleanValue() : Boolean.parseBoolean(v.toString());
}
public String getValueAsString() {
final Object v = value != null ? value : defValue;
return v.toString();
}
public int getValueAsInteger() {
final Object v = value != null ? value : defValue;
return v instanceof Integer ? ((Integer) v).intValue() : Integer.parseInt(v.toString());
}
public float getValueAsFloat() {
final Object v = value != null ? value : defValue;
return v instanceof Float ? ((Float) v).floatValue() : Float.parseFloat(v.toString());
}
public String getKey() {
return key;
}
public Class<?> getType() {
return type;
}
/** Returns the human-readable description of this configuration entry. */
public String getDescription() {
return description;
}
/**
 * Looks up a configuration entry by its key, ignoring case.
 *
 * @param iKey
 *          Key to find. It's case insensitive.
 * @return the matching entry, or null when no entry uses that key
 */
public static OGlobalConfiguration findByKey(final String iKey) {
final OGlobalConfiguration[] entries = values();
for (int i = 0; i < entries.length; ++i) {
if (entries[i].getKey().equalsIgnoreCase(iKey)) {
return entries[i];
}
}
return null;
}
/**
 * Applies overrides supplied as JVM system properties: for every entry whose
 * key matches a defined property, the property's value replaces the default.
 */
private static void readConfiguration() {
for (final OGlobalConfiguration entry : values()) {
final String override = System.getProperty(entry.key);
if (override != null) {
entry.setValue(override);
}
}
}
/**
 * Changes configuration values in one shot by passing a Map of values.
 * Keys are matched case-insensitively (consistent with {@link #findByKey(String)});
 * unknown keys are silently ignored.
 */
public static void setConfiguration(final Map<String, Object> iConfig) {
for (final Entry<String, Object> config : iConfig.entrySet()) {
// valueOf() throws IllegalArgumentException on unknown keys, which made the
// null-guard below dead code and crashed on any unrecognized entry; findByKey()
// returns null instead, so unknown keys are skipped as the guard intended.
final OGlobalConfiguration cfg = findByKey(config.getKey());
if (cfg != null) {
cfg.setValue(config.getValue());
}
}
}
}
| Changed parameter with a better name
| core/src/main/java/com/orientechnologies/orient/core/config/OGlobalConfiguration.java | Changed parameter with a better name |
|
Java | apache-2.0 | 3fb2e87be6d39a810adea88ab12571d82ab7b2d4 | 0 | MovingBlocks/Terasology,MovingBlocks/Terasology,Nanoware/Terasology,MovingBlocks/Terasology,Malanius/Terasology,Malanius/Terasology,Nanoware/Terasology,Nanoware/Terasology | /*
* Copyright 2015 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.nui.layers.mainMenu;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.assets.ResourceUrn;
import org.terasology.config.Config;
import org.terasology.config.ServerInfo;
import org.terasology.engine.GameEngine;
import org.terasology.engine.GameThread;
import org.terasology.engine.modes.StateLoading;
import org.terasology.engine.module.ModuleManager;
import org.terasology.i18n.TranslationSystem;
import org.terasology.identity.storageServiceClient.StorageServiceWorker;
import org.terasology.input.Keyboard;
import org.terasology.module.ModuleRegistry;
import org.terasology.naming.NameVersion;
import org.terasology.network.JoinStatus;
import org.terasology.network.NetworkSystem;
import org.terasology.network.PingService;
import org.terasology.network.ServerInfoMessage;
import org.terasology.network.ServerInfoService;
import org.terasology.registry.In;
import org.terasology.rendering.FontColor;
import org.terasology.rendering.nui.Color;
import org.terasology.rendering.nui.CoreScreenLayer;
import org.terasology.rendering.nui.WidgetUtil;
import org.terasology.rendering.nui.animation.MenuAnimationSystems;
import org.terasology.rendering.nui.databinding.BindHelper;
import org.terasology.rendering.nui.databinding.IntToStringBinding;
import org.terasology.rendering.nui.databinding.ReadOnlyBinding;
import org.terasology.rendering.nui.events.NUIKeyEvent;
import org.terasology.rendering.nui.itemRendering.StringTextRenderer;
import org.terasology.rendering.nui.layouts.CardLayout;
import org.terasology.rendering.nui.widgets.ActivateEventListener;
import org.terasology.rendering.nui.widgets.UIButton;
import org.terasology.rendering.nui.widgets.UILabel;
import org.terasology.rendering.nui.widgets.UIList;
import org.terasology.world.internal.WorldInfo;
import org.terasology.world.time.WorldTime;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
*/
public class JoinGameScreen extends CoreScreenLayer {
public static final ResourceUrn ASSET_URI = new ResourceUrn("engine:joinGameScreen");
private static final Logger logger = LoggerFactory.getLogger(JoinGameScreen.class);
@In
private Config config;
@In
private NetworkSystem networkSystem;
@In
private GameEngine engine;
@In
private ModuleManager moduleManager;
@In
private TranslationSystem translationSystem;
@In
private StorageServiceWorker storageServiceWorker;
private Map<ServerInfo, Future<ServerInfoMessage>> extInfo = new HashMap<>();
private ServerInfoService infoService;
private ServerListDownloader downloader;
private UIList<ServerInfo> visibleList;
private List<ServerInfo> listedServers = new ArrayList<>();
private Predicate<ServerInfo> activeServersOnly = ServerInfo::isActive;
private boolean updateComplete;
@Override
public void initialise() {
setAnimationSystem(MenuAnimationSystems.createDefaultSwipeAnimation());
// kick off the master-server list download immediately; update() drains it
downloader = new ServerListDownloader(config.getNetwork().getMasterServer());
CardLayout cards = find("cards", CardLayout.class);
// card 1: user-managed server entries persisted in the config
UIList<ServerInfo> customServerList = find("customServerList", UIList.class);
if (customServerList != null) {
customServerList.setList(config.getNetwork().getServerInfos());
configureServerList(customServerList);
}
// card 2: servers announced by the master server (filled asynchronously)
UIList<ServerInfo> onlineServerList = find("onlineServerList", UIList.class);
if (onlineServerList != null) {
onlineServerList.setList(listedServers);
configureServerList(onlineServerList);
}
// tab switching: flip the displayed card, highlight the active tab button,
// remember which list is visible and refresh its cached server details
ActivateEventListener activateCustom = e -> {
cards.setDisplayedCard("customServerListScrollArea");
find("customButton", UIButton.class).setFamily("highlight");
find("onlineButton", UIButton.class).setFamily("default");
visibleList = customServerList;
refresh();
};
WidgetUtil.trySubscribe(this, "customButton", activateCustom);
ActivateEventListener activateOnline = e -> {
cards.setDisplayedCard("onlineServerListScrollArea");
find("customButton", UIButton.class).setFamily("default");
find("onlineButton", UIButton.class).setFamily("highlight");
visibleList = onlineServerList;
refresh();
};
WidgetUtil.trySubscribe(this, "onlineButton", activateOnline);
bindCustomButtons();
bindInfoLabels();
WidgetUtil.trySubscribe(this, "close", button -> {
config.save();
triggerBackAnimation();
});
// start on the online tab by simulating its activation
activateOnline.onActivated(null);
}
@Override
public void onOpened() {
super.onOpened();
// fresh service per screen visit; closed again in onClosed()
infoService = new ServerInfoService();
// a username is required before joining; prompt once if never entered
if (!config.getPlayer().hasEnteredUsername()) {
getManager().pushScreen(EnterUsernamePopup.ASSET_URI, EnterUsernamePopup.class);
}
// resolve clashes between local and storage-service identities up front
if (storageServiceWorker.hasConflictingIdentities()) {
new IdentityConflictHelper(storageServiceWorker, getManager(), translationSystem).runSolver();
}
}
/**
 * Per-frame update: while the master-server download is still running, mirror
 * its current (partial) results into the visible online list, keeping only
 * servers flagged as active. Once the download finishes, one final sync is
 * performed and further updates are skipped.
 */
@Override
public void update(float delta) {
super.update(delta);
if (updateComplete) {
return;
}
if (downloader.isDone()) {
updateComplete = true;
}
listedServers.clear();
listedServers.addAll(Collections2.filter(downloader.getServers(), activeServersOnly));
}
@Override
public void onClosed() {
// release the info service created in onOpened() before tearing down
infoService.close();
super.onClosed();
}
/** This screen is opaque; layers beneath it are not rendered. */
@Override
public boolean isLowerLayerVisible() {
return false;
}
/**
 * Attempts to join the server at the given address and port. A wait popup is
 * shown while the (blocking) network join runs; on success the engine switches
 * to the loading state, on failure a message popup reports the error.
 */
private void join(final String address, final int port) {
// removed the redundant local variable: the callable simply forwards the result
Callable<JoinStatus> operation = () -> networkSystem.join(address, port);
final WaitPopup<JoinStatus> popup = getManager().pushScreen(WaitPopup.ASSET_URI, WaitPopup.class);
popup.setMessage(translationSystem.translate("${engine:menu#join-game-online}"),
translationSystem.translate("${engine:menu#connecting-to}")
+ " '"
+ address
+ ":"
+ port
+ "' - "
+ translationSystem.translate("${engine:menu#please-wait}"));
popup.onSuccess(result -> {
if (result.getStatus() != JoinStatus.Status.FAILED) {
engine.changeState(new StateLoading(result));
} else {
MessagePopup screen = getManager().pushScreen(MessagePopup.ASSET_URI, MessagePopup.class);
screen.setMessage(translationSystem.translate("${engine:menu#failed-to-join}"),
translationSystem.translate("${engine:menu#could-not-connect-to-server}") + " - " + result.getErrorMessage());
}
});
popup.startOperation(operation, true);
}
/**
 * Wires a server list widget: double-activation joins the server, selection
 * triggers an async info request (cached in extInfo) plus a ping, and entries
 * are rendered by their server name.
 */
private void configureServerList(final UIList<ServerInfo> serverList) {
serverList.subscribe((widget, item) -> join(item.getAddress(), item.getPort()));
serverList.subscribeSelection((widget, item) -> {
// drop any stale cached info for this entry before re-querying
extInfo.remove(item);
if (item != null) {
extInfo.put(item, infoService.requestInfo(item.getAddress(), item.getPort()));
refreshPing();
}
});
serverList.setItemRenderer(new StringTextRenderer<ServerInfo>() {
@Override
public String getString(ServerInfo value) {
return value.getName();
}
});
}
/**
 * Binds the detail labels (name, owner, address, port, players, modules,
 * worlds) and the join/refresh buttons to whatever entry is selected in the
 * currently visible server list. The async labels show a "requested"
 * placeholder until the server's extended info future completes.
 */
private void bindInfoLabels() {
// live view on the selection of whichever list is currently displayed
final ReadOnlyBinding<ServerInfo> infoBinding = new ReadOnlyBinding<ServerInfo>() {
@Override
public ServerInfo get() {
return visibleList.getSelection();
}
};
UILabel name = find("name", UILabel.class);
if (name != null) {
name.bindText(BindHelper.bindBoundBeanProperty("name", infoBinding, ServerInfo.class, String.class));
}
UILabel owner = find("owner", UILabel.class);
if (owner != null) {
owner.bindText(BindHelper.bindBoundBeanProperty("owner", infoBinding, ServerInfo.class, String.class));
}
UILabel address = find("address", UILabel.class);
if (address != null) {
address.bindText(BindHelper.bindBoundBeanProperty("address", infoBinding, ServerInfo.class, String.class));
}
UILabel port = find("port", UILabel.class);
if (port != null) {
port.bindText(new IntToStringBinding(BindHelper.bindBoundBeanProperty("port", infoBinding, ServerInfo.class, int.class)));
}
// the three labels below poll the cached Future each frame: null when no
// request is pending, placeholder text while pending, formatted text when done
UILabel onlinePlayers = find("onlinePlayers", UILabel.class);
onlinePlayers.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
Future<ServerInfoMessage> info = extInfo.get(visibleList.getSelection());
if (info != null) {
if (info.isDone()) {
return getOnlinePlayersText(info);
} else {
return translationSystem.translate("${engine:menu#join-server-requested}");
}
}
return null;
}
});
UILabel modules = find("modules", UILabel.class);
modules.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
Future<ServerInfoMessage> info = extInfo.get(visibleList.getSelection());
if (info != null) {
if (info.isDone()) {
return getModulesText(info);
} else {
return translationSystem.translate("${engine:menu#join-server-requested}");
}
}
return null;
}
});
UILabel worlds = find("worlds", UILabel.class);
worlds.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
Future<ServerInfoMessage> info = extInfo.get(visibleList.getSelection());
if (info != null) {
if (info.isDone()) {
return getWorldText(info);
} else {
return translationSystem.translate("${engine:menu#join-server-requested}");
}
}
return null;
}
});
// join/refresh are only enabled while an entry is selected
UIButton joinButton = find("join", UIButton.class);
if (joinButton != null) {
joinButton.bindEnabled(new ReadOnlyBinding<Boolean>() {
@Override
public Boolean get() {
return infoBinding.get() != null;
}
});
joinButton.subscribe(button -> {
config.save();
ServerInfo item = infoBinding.get();
if (item != null) {
join(item.getAddress(), item.getPort());
}
});
}
UIButton refreshButton = find("refresh", UIButton.class);
if (refreshButton != null) {
refreshButton.bindEnabled(new ReadOnlyBinding<Boolean>() {
@Override
public Boolean get() {
return visibleList.getSelection() != null;
}
});
refreshButton.subscribe(button -> {
refresh();
});
}
}
/**
 * Wires the buttons that manage user-defined servers (add/edit/remove) and the
 * label that shows the master-server download status. Edit/remove are enabled
 * only while an entry of the custom list is selected.
 */
private void bindCustomButtons() {
UIList<?> customServerList = find("customServerList", UIList.class);
ReadOnlyBinding<Boolean> localSelectedServerOnly = new ReadOnlyBinding<Boolean>() {
@Override
public Boolean get() {
return customServerList.getSelection() != null;
}
};
UIButton add = find("add", UIButton.class);
if (add != null) {
add.subscribe(button -> {
AddServerPopup popup = getManager().pushScreen(AddServerPopup.ASSET_URI, AddServerPopup.class);
// select the entry if added successfully
popup.onSuccess(item -> {
config.getNetwork().addServerInfo(item);
visibleList.setSelection(item);
});
});
}
UIButton edit = find("edit", UIButton.class);
if (edit != null) {
edit.bindEnabled(localSelectedServerOnly);
edit.subscribe(button -> {
AddServerPopup popup = getManager().pushScreen(AddServerPopup.ASSET_URI, AddServerPopup.class);
ServerInfo info = visibleList.getSelection();
popup.setServerInfo(info);
// editing invalidates the currently known info, so query it again
popup.onSuccess(item -> extInfo.put(item, infoService.requestInfo(item.getAddress(), item.getPort())));
});
}
UIButton removeButton = find("remove", UIButton.class);
if (removeButton != null) {
removeButton.bindEnabled(localSelectedServerOnly);
removeButton.subscribe(button -> {
ServerInfo info = visibleList.getSelection();
if (info != null) {
// drop from config, cache and selection in one go
config.getNetwork().removeServerInfo(info);
extInfo.remove(info);
visibleList.setSelection(null);
}
});
}
UILabel downloadLabel = find("download", UILabel.class);
if (downloadLabel != null) {
downloadLabel.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
return translationSystem.translate(downloader.getStatus());
}
});
}
}
/**
 * Formats one line per world hosted by the server ("title (x.xx days)").
 * Returns red "connection failed" text when the info request failed or the
 * server replied with nothing.
 */
private String getWorldText(Future<ServerInfoMessage> info) {
try {
ServerInfoMessage message = info.get();
if (message == null) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
List<String> worldLines = new ArrayList<>();
for (WorldInfo worldInfo : message.getWorldInfoList()) {
float timeInDays = worldInfo.getTime() / (float) WorldTime.DAY_LENGTH;
worldLines.add(String.format("%s (%.2f days)", worldInfo.getTitle(), timeInDays));
}
return Joiner.on('\n').join(worldLines);
} catch (ExecutionException | InterruptedException e) {
// NOTE(review): InterruptedException is swallowed without re-interrupting
// the thread; existing behavior is kept here.
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
}
/**
 * Formats the server's online player count, or red "connection failed" text
 * when the info request failed or the server replied with nothing.
 */
private String getOnlinePlayersText(Future<ServerInfoMessage> info) {
try {
ServerInfoMessage serverInfoMessage = info.get();
if (serverInfoMessage == null) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
// format the single value directly; the previous single-element
// List + Joiner round-trip produced exactly the same string
return String.format("%d", serverInfoMessage.getOnlinePlayersAmount());
} catch (ExecutionException | InterruptedException e) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
}
/**
 * Formats one line per module the server runs, colored green when the module
 * (same name and version) is installed locally and red otherwise. Returns red
 * "connection failed" text when the info request failed or returned nothing.
 */
private String getModulesText(Future<ServerInfoMessage> info) {
try {
ServerInfoMessage serverInfoMessage = info.get();
if (serverInfoMessage == null) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
List<String> codedModInfo = new ArrayList<>();
ModuleRegistry reg = moduleManager.getRegistry();
for (NameVersion entry : serverInfoMessage.getModuleList()) {
boolean isInstalled = reg.getModule(entry.getName(), entry.getVersion()) != null;
Color color = isInstalled ? Color.GREEN : Color.RED;
codedModInfo.add(FontColor.getColored(entry.toString(), color));
}
// List.sort replaces the legacy Collections.sort(list, cmp) form.
// NOTE(review): the sort runs on the color-coded strings; presumably the
// color prefix is identical per entry so ordering is unaffected — confirm.
codedModInfo.sort(String.CASE_INSENSITIVE_ORDER);
return Joiner.on('\n').join(codedModInfo);
} catch (ExecutionException | InterruptedException e) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
}
/**
 * Pings the currently selected server on a background thread and writes the
 * round-trip time (or a red failure message) into the "ping" label via the
 * game thread. Assumes a non-null selection; callers invoke this only from
 * the selection listener, where that holds.
 */
private void refreshPing() {
String address = visibleList.getSelection().getAddress();
int port = visibleList.getSelection().getPort();
UILabel ping = find("ping", UILabel.class);
ping.setText("Requested");
Thread getPing = new Thread(() -> {
PingService pingService = new PingService(address, port);
// we're not on the game thread, so we cannot modify GUI elements directly
try {
long responseTime = pingService.call();
// only publish if the selection still points at the pinged server
// NOTE(review): getSelection() may be null here if the selection was
// cleared while pinging — presumably benign in practice, but verify.
if (visibleList.getSelection().getAddress().equals(address)) {
GameThread.asynch(() -> ping.setText(responseTime + " ms."));
}
} catch (IOException e) {
String text = translationSystem.translate("${engine:menu#connection-failed}");
GameThread.asynch(() -> ping.setText(FontColor.getColored(text, Color.RED)));
}
});
// TODO: once the common thread pool is in place this could be posted there and the
// returned Future could be kept and cancelled as soon the selected menu entry changes
getPing.start();
}
/**
 * ESC closes the screen (when allowed) and consumes the event; R refreshes the
 * cached server details without consuming it, so other listeners still see it.
 */
@Override // added for consistency with the other overrides in this class
public boolean onKeyEvent(NUIKeyEvent event) {
if (event.isDown()) {
if (event.getKey() == Keyboard.Key.ESCAPE) {
if (isEscapeToCloseAllowed()) {
triggerBackAnimation();
return true;
}
} else if (event.getKey() == Keyboard.Key.R) {
refresh();
}
}
return false;
}
/**
 * Drops all cached extended server info and re-selects the current entry,
 * which re-triggers the selection listener and thus a fresh info request.
 */
public void refresh() {
ServerInfo current = visibleList.getSelection();
visibleList.setSelection(null);
extInfo.clear();
visibleList.setSelection(current);
}
}
| engine/src/main/java/org/terasology/rendering/nui/layers/mainMenu/JoinGameScreen.java | /*
* Copyright 2015 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.nui.layers.mainMenu;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.assets.ResourceUrn;
import org.terasology.config.Config;
import org.terasology.config.ServerInfo;
import org.terasology.engine.GameEngine;
import org.terasology.engine.GameThread;
import org.terasology.engine.modes.StateLoading;
import org.terasology.engine.module.ModuleManager;
import org.terasology.i18n.TranslationSystem;
import org.terasology.identity.storageServiceClient.StorageServiceWorker;
import org.terasology.input.Keyboard;
import org.terasology.module.ModuleRegistry;
import org.terasology.naming.NameVersion;
import org.terasology.network.JoinStatus;
import org.terasology.network.NetworkSystem;
import org.terasology.network.PingService;
import org.terasology.network.ServerInfoMessage;
import org.terasology.network.ServerInfoService;
import org.terasology.registry.In;
import org.terasology.rendering.FontColor;
import org.terasology.rendering.nui.Color;
import org.terasology.rendering.nui.CoreScreenLayer;
import org.terasology.rendering.nui.WidgetUtil;
import org.terasology.rendering.nui.animation.MenuAnimationSystems;
import org.terasology.rendering.nui.databinding.BindHelper;
import org.terasology.rendering.nui.databinding.IntToStringBinding;
import org.terasology.rendering.nui.databinding.ReadOnlyBinding;
import org.terasology.rendering.nui.events.NUIKeyEvent;
import org.terasology.rendering.nui.itemRendering.StringTextRenderer;
import org.terasology.rendering.nui.layouts.CardLayout;
import org.terasology.rendering.nui.widgets.ActivateEventListener;
import org.terasology.rendering.nui.widgets.UIButton;
import org.terasology.rendering.nui.widgets.UILabel;
import org.terasology.rendering.nui.widgets.UIList;
import org.terasology.world.internal.WorldInfo;
import org.terasology.world.time.WorldTime;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
*/
public class JoinGameScreen extends CoreScreenLayer {
public static final ResourceUrn ASSET_URI = new ResourceUrn("engine:joinGameScreen");
private static final Logger logger = LoggerFactory.getLogger(JoinGameScreen.class);
@In
private Config config;
@In
private NetworkSystem networkSystem;
@In
private GameEngine engine;
@In
private ModuleManager moduleManager;
@In
private TranslationSystem translationSystem;
@In
private StorageServiceWorker storageServiceWorker;
private Map<ServerInfo, Future<ServerInfoMessage>> extInfo = new HashMap<>();
private ServerInfoService infoService;
private ServerListDownloader downloader;
private UIList<ServerInfo> visibleList;
private List<ServerInfo> listedServers = new ArrayList<>();
private Predicate<ServerInfo> activeServersOnly = ServerInfo::isActive;
private boolean updateComplete;
@Override
public void initialise() {
setAnimationSystem(MenuAnimationSystems.createDefaultSwipeAnimation());
downloader = new ServerListDownloader(config.getNetwork().getMasterServer());
CardLayout cards = find("cards", CardLayout.class);
UIList<ServerInfo> customServerList = find("customServerList", UIList.class);
if (customServerList != null) {
customServerList.setList(config.getNetwork().getServerInfos());
configureServerList(customServerList);
}
UIList<ServerInfo> onlineServerList = find("onlineServerList", UIList.class);
if (onlineServerList != null) {
onlineServerList.setList(listedServers);
configureServerList(onlineServerList);
}
ActivateEventListener activateCustom = e -> {
cards.setDisplayedCard("customServerListScrollArea");
find("customButton", UIButton.class).setFamily("highlight");
find("onlineButton", UIButton.class).setFamily("default");
visibleList = customServerList;
};
WidgetUtil.trySubscribe(this, "customButton", activateCustom);
ActivateEventListener activateOnline = e -> {
cards.setDisplayedCard("onlineServerListScrollArea");
find("customButton", UIButton.class).setFamily("default");
find("onlineButton", UIButton.class).setFamily("highlight");
visibleList = onlineServerList;
};
WidgetUtil.trySubscribe(this, "onlineButton", activateOnline);
bindCustomButtons();
bindInfoLabels();
WidgetUtil.trySubscribe(this, "close", button -> {
config.save();
triggerBackAnimation();
});
activateOnline.onActivated(null);
}
@Override
public void onOpened() {
super.onOpened();
infoService = new ServerInfoService();
if (!config.getPlayer().hasEnteredUsername()) {
getManager().pushScreen(EnterUsernamePopup.ASSET_URI, EnterUsernamePopup.class);
}
if (storageServiceWorker.hasConflictingIdentities()) {
new IdentityConflictHelper(storageServiceWorker, getManager(), translationSystem).runSolver();
}
}
@Override
public void update(float delta) {
super.update(delta);
if (!updateComplete) {
if (downloader.isDone()) {
updateComplete = true;
}
listedServers.clear();
listedServers.addAll(Collections2.filter(downloader.getServers(), activeServersOnly));
}
}
@Override
public void onClosed() {
infoService.close();
super.onClosed();
}
@Override
public boolean isLowerLayerVisible() {
return false;
}
private void join(final String address, final int port) {
Callable<JoinStatus> operation = () -> {
JoinStatus joinStatus = networkSystem.join(address, port);
return joinStatus;
};
final WaitPopup<JoinStatus> popup = getManager().pushScreen(WaitPopup.ASSET_URI, WaitPopup.class);
popup.setMessage(translationSystem.translate("${engine:menu#join-game-online}"),
translationSystem.translate("${engine:menu#connecting-to}")
+ " '"
+ address
+ ":"
+ port
+ "' - "
+ translationSystem.translate("${engine:menu#please-wait}"));
popup.onSuccess(result -> {
if (result.getStatus() != JoinStatus.Status.FAILED) {
engine.changeState(new StateLoading(result));
} else {
MessagePopup screen = getManager().pushScreen(MessagePopup.ASSET_URI, MessagePopup.class);
screen.setMessage(translationSystem.translate("${engine:menu#failed-to-join}"),
translationSystem.translate("${engine:menu#could-not-connect-to-server}") + " - " + result.getErrorMessage());
}
});
popup.startOperation(operation, true);
}
private void configureServerList(final UIList<ServerInfo> serverList) {
serverList.subscribe((widget, item) -> join(item.getAddress(), item.getPort()));
serverList.subscribeSelection((widget, item) -> {
extInfo.remove(item);
if (item != null) {
extInfo.put(item, infoService.requestInfo(item.getAddress(), item.getPort()));
refreshPing();
}
});
serverList.setItemRenderer(new StringTextRenderer<ServerInfo>() {
@Override
public String getString(ServerInfo value) {
return value.getName();
}
});
}
private void bindInfoLabels() {
final ReadOnlyBinding<ServerInfo> infoBinding = new ReadOnlyBinding<ServerInfo>() {
@Override
public ServerInfo get() {
return visibleList.getSelection();
}
};
UILabel name = find("name", UILabel.class);
if (name != null) {
name.bindText(BindHelper.bindBoundBeanProperty("name", infoBinding, ServerInfo.class, String.class));
}
UILabel owner = find("owner", UILabel.class);
if (owner != null) {
owner.bindText(BindHelper.bindBoundBeanProperty("owner", infoBinding, ServerInfo.class, String.class));
}
UILabel address = find("address", UILabel.class);
if (address != null) {
address.bindText(BindHelper.bindBoundBeanProperty("address", infoBinding, ServerInfo.class, String.class));
}
UILabel port = find("port", UILabel.class);
if (port != null) {
port.bindText(new IntToStringBinding(BindHelper.bindBoundBeanProperty("port", infoBinding, ServerInfo.class, int.class)));
}
UILabel onlinePlayers = find("onlinePlayers", UILabel.class);
onlinePlayers.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
Future<ServerInfoMessage> info = extInfo.get(visibleList.getSelection());
if (info != null) {
if (info.isDone()) {
return getOnlinePlayersText(info);
} else {
return translationSystem.translate("${engine:menu#join-server-requested}");
}
}
return null;
}
});
UILabel modules = find("modules", UILabel.class);
modules.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
Future<ServerInfoMessage> info = extInfo.get(visibleList.getSelection());
if (info != null) {
if (info.isDone()) {
return getModulesText(info);
} else {
return translationSystem.translate("${engine:menu#join-server-requested}");
}
}
return null;
}
});
UILabel worlds = find("worlds", UILabel.class);
worlds.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
Future<ServerInfoMessage> info = extInfo.get(visibleList.getSelection());
if (info != null) {
if (info.isDone()) {
return getWorldText(info);
} else {
return translationSystem.translate("${engine:menu#join-server-requested}");
}
}
return null;
}
});
UIButton joinButton = find("join", UIButton.class);
if (joinButton != null) {
joinButton.bindEnabled(new ReadOnlyBinding<Boolean>() {
@Override
public Boolean get() {
return infoBinding.get() != null;
}
});
joinButton.subscribe(button -> {
config.save();
ServerInfo item = infoBinding.get();
if (item != null) {
join(item.getAddress(), item.getPort());
}
});
}
UIButton refreshButton = find("refresh", UIButton.class);
if (refreshButton != null) {
refreshButton.bindEnabled(new ReadOnlyBinding<Boolean>() {
@Override
public Boolean get() {
return visibleList.getSelection() != null;
}
});
refreshButton.subscribe(button -> {
refresh();
});
}
}
private void bindCustomButtons() {
UIList<?> customServerList = find("customServerList", UIList.class);
ReadOnlyBinding<Boolean> localSelectedServerOnly = new ReadOnlyBinding<Boolean>() {
@Override
public Boolean get() {
return customServerList.getSelection() != null;
}
};
UIButton add = find("add", UIButton.class);
if (add != null) {
add.subscribe(button -> {
AddServerPopup popup = getManager().pushScreen(AddServerPopup.ASSET_URI, AddServerPopup.class);
// select the entry if added successfully
popup.onSuccess(item -> {
config.getNetwork().addServerInfo(item);
visibleList.setSelection(item);
});
});
}
UIButton edit = find("edit", UIButton.class);
if (edit != null) {
edit.bindEnabled(localSelectedServerOnly);
edit.subscribe(button -> {
AddServerPopup popup = getManager().pushScreen(AddServerPopup.ASSET_URI, AddServerPopup.class);
ServerInfo info = visibleList.getSelection();
popup.setServerInfo(info);
// editing invalidates the currently known info, so query it again
popup.onSuccess(item -> extInfo.put(item, infoService.requestInfo(item.getAddress(), item.getPort())));
});
}
UIButton removeButton = find("remove", UIButton.class);
if (removeButton != null) {
removeButton.bindEnabled(localSelectedServerOnly);
removeButton.subscribe(button -> {
ServerInfo info = visibleList.getSelection();
if (info != null) {
config.getNetwork().removeServerInfo(info);
extInfo.remove(info);
visibleList.setSelection(null);
}
});
}
UILabel downloadLabel = find("download", UILabel.class);
if (downloadLabel != null) {
downloadLabel.bindText(new ReadOnlyBinding<String>() {
@Override
public String get() {
return translationSystem.translate(downloader.getStatus());
}
});
}
}
private String getWorldText(Future<ServerInfoMessage> info) {
try {
List<String> codedWorldInfo = new ArrayList<>();
ServerInfoMessage serverInfoMessage = info.get();
if (serverInfoMessage == null) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
for (WorldInfo wi : serverInfoMessage.getWorldInfoList()) {
float timeInDays = wi.getTime() / (float) WorldTime.DAY_LENGTH;
codedWorldInfo.add(String.format("%s (%.2f days)", wi.getTitle(), timeInDays));
}
return Joiner.on('\n').join(codedWorldInfo);
} catch (ExecutionException | InterruptedException e) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
}
private String getOnlinePlayersText(Future<ServerInfoMessage> info) {
try {
List<String> codedWorldInfo = new ArrayList<>();
ServerInfoMessage serverInfoMessage = info.get();
if (serverInfoMessage == null) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
codedWorldInfo.add(String.format("%d", serverInfoMessage.getOnlinePlayersAmount()));
return Joiner.on('\n').join(codedWorldInfo);
} catch (ExecutionException | InterruptedException e) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
}
private String getModulesText(Future<ServerInfoMessage> info) {
try {
ServerInfoMessage serverInfoMessage = info.get();
if (serverInfoMessage == null) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
List<String> codedModInfo = new ArrayList<>();
ModuleRegistry reg = moduleManager.getRegistry();
for (NameVersion entry : serverInfoMessage.getModuleList()) {
boolean isInstalled = reg.getModule(entry.getName(), entry.getVersion()) != null;
Color color = isInstalled ? Color.GREEN : Color.RED;
codedModInfo.add(FontColor.getColored(entry.toString(), color));
}
Collections.sort(codedModInfo, String.CASE_INSENSITIVE_ORDER);
return Joiner.on('\n').join(codedModInfo);
} catch (ExecutionException | InterruptedException e) {
return FontColor.getColored(translationSystem.translate("${engine:menu#connection-failed}"), Color.RED);
}
}
private void refreshPing() {
String address = visibleList.getSelection().getAddress();
int port = visibleList.getSelection().getPort();
UILabel ping = find("ping", UILabel.class);
ping.setText("Requested");
Thread getPing = new Thread(() -> {
PingService pingService = new PingService(address, port);
// we're not on the game thread, so we cannot modify GUI elements directly
try {
long responseTime = pingService.call();
if (visibleList.getSelection().getAddress().equals(address)) {
GameThread.asynch(() -> ping.setText(responseTime + " ms."));
}
} catch (IOException e) {
String text = translationSystem.translate("${engine:menu#connection-failed}");
GameThread.asynch(() -> ping.setText(FontColor.getColored(text, Color.RED)));
}
});
// TODO: once the common thread pool is in place this could be posted there and the
// returned Future could be kept and cancelled as soon the selected menu entry changes
getPing.start();
}
/**
 * Handles key presses on this screen: ESCAPE closes it (when allowed) and
 * consumes the event; R refreshes the selection without consuming it.
 *
 * @param event the key event delivered by the NUI layer
 * @return true if the event was consumed, false otherwise
 */
public boolean onKeyEvent(NUIKeyEvent event) {
    // Only react to key-down events; releases fall through unconsumed.
    if (!event.isDown()) {
        return false;
    }
    if (event.getKey() == Keyboard.Key.ESCAPE) {
        if (isEscapeToCloseAllowed()) {
            triggerBackAnimation();
            return true;
        }
    } else if (event.getKey() == Keyboard.Key.R) {
        refresh();
    }
    return false;
}
/**
 * Forces the extended server information to be re-fetched for the current
 * entry by clearing the cache and re-firing the selection listener.
 */
public void refresh() {
    ServerInfo previousSelection = visibleList.getSelection();
    // Deselect first so that re-selecting the same entry triggers the
    // selection listener (and therefore a fresh lookup) again.
    visibleList.setSelection(null);
    extInfo.clear();
    visibleList.setSelection(previousSelection);
}
}
| Added Refresh on changing from listed to custom in join game
| engine/src/main/java/org/terasology/rendering/nui/layers/mainMenu/JoinGameScreen.java | Added Refresh on changing from listed to custom in join game |
|
Java | apache-2.0 | ec27afc4a7c710b01c6954c4a975c1043721cb1d | 0 | slipperyseal/B9 | package net.catchpole.B9.devices.rockblock;
import net.catchpole.B9.devices.serial.SocketSerialPort;
import org.junit.Ignore;
import org.junit.Test;
// Integration test against a live serial-over-socket bridge; @Ignore keeps it
// out of normal builds (it needs hardware on localhost:4000 and sleeps 5s).
@Ignore
public class RockBlockTest {
// Connects to a RockBLOCK modem exposed on localhost:4000, prints its
// reception state and status, then waits for asynchronous responses.
@Test
public void test() throws Exception {
RockBlock rockBlock = new RockBlock(new SocketSerialPort("localhost", 4000));
rockBlock.connect();
System.out.println(rockBlock.hasReception());
System.out.println(rockBlock.getStatus());
// Give the serial link time to deliver any pending device output.
Thread.sleep(5000);
}
}
| B9-core/src/test/java/net/catchpole/B9/devices/rockblock/RockBlockTest.java | package net.catchpole.B9.devices.rockblock;
import net.catchpole.B9.devices.serial.SocketSerialPort;
import org.junit.Test;
// Integration test: requires a live serial-over-socket bridge on
// localhost:4000, so it cannot run in an isolated build environment.
public class RockBlockTest {
// Opens a connection to the RockBLOCK modem and waits for it to settle.
@Test
public void test() throws Exception {
RockBlock rockBlock = new RockBlock(new SocketSerialPort("localhost", 4000));
rockBlock.connect();
// Give the serial link time to complete the connect handshake.
Thread.sleep(5000);
}
}
| disable integration test
| B9-core/src/test/java/net/catchpole/B9/devices/rockblock/RockBlockTest.java | disable integration test |
|
Java | apache-2.0 | a486437570a75ba1ffc594b86b76e78bacbaa3bc | 0 | HyungJon/HubTurbo,HyungJon/HubTurbo,Sumei1009/HubTurbo,Sumei1009/HubTurbo,saav/HubTurbo,saav/HubTurbo,ianngiaw/HubTurbo,Honoo/HubTurbo,gaieepo/HubTurbo,Honoo/HubTurbo,gaieepo/HubTurbo,ianngiaw/HubTurbo | package ui;
import java.lang.ref.WeakReference;
import javafx.event.ActionEvent;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.web.HTMLEditor;
import javafx.scene.web.WebView;
import javafx.stage.Modality;
import javafx.stage.Stage;
/**
 * Modal popup that shows rendered HTML markup and lets the user toggle into an
 * HTMLEditor to edit it. A caller-supplied completion callback runs when the
 * user confirms via the completion button, after which the popup closes.
 */
public class EditableMarkupPopup extends Stage{
// Octicon glyphs used on the view/edit toggle button.
protected static final String EDIT_BTN_TXT = "\uf058";
protected static final String BACK_BTN_TXT = " \uf0a4 ";
// Read-only rendered view of the markup.
private WebView markupDisplay;
// Edit mode: editor plus its confirm-button row.
private VBox editableDisplayView;
private HTMLEditor editableDisplay;
// Toggles between the rendered view and the editor.
private ToggleButton modeButton;
// Root layout: row 0 is the toggle-button bar, row 1 is the active view.
private VBox container;
// Callback invoked when the completion button is pressed; may be null.
private Runnable editModeCompletion;
private Button completionButton;
// Caption for the completion button, fixed at construction time.
private final String buttonText;
/**
 * Builds the popup as an application-modal stage.
 *
 * @param buttonText caption shown on the completion button in edit mode
 */
public EditableMarkupPopup(String buttonText){
this.buttonText = buttonText;
setupContents();
Scene scene = new Scene(container);
scene.getStylesheets().add(EditableMarkupPopup.class.getResource("hubturbo.css").toString());
this.setScene(scene);
this.initModality(Modality.APPLICATION_MODAL);
this.initOwner(null);
}
/** Returns the current (possibly edited) HTML text from the editor. */
public String getDisplayedText(){
return editableDisplay.getHtmlText();
}
/** Loads the given markup into both the rendered view and the editor. */
public void setDisplayedText(String markup){
markupDisplay.getEngine().loadContent(markup);
editableDisplay.setHtmlText(markup);
}
// Assembles the root layout: toggle-button bar on top, rendered view below.
private void setupContents(){
container = new VBox();
setupToggleButton();
setupMarkupDisplay();
setupEditableDisplayView();
HBox buttonContainer = new HBox();
buttonContainer.setAlignment(Pos.BASELINE_RIGHT);
buttonContainer.getChildren().add(modeButton);
container.getChildren().addAll(buttonContainer, markupDisplay);
}
// Creates the read-only WebView used for the rendered markup.
private void setupMarkupDisplay(){
markupDisplay = new WebView();
}
// Builds the edit-mode pane: the HTML editor with a right-aligned confirm button.
private void setupEditableDisplayView(){
editableDisplayView = new VBox();
editableDisplay = new HTMLEditor();
setupCompleteButton();
HBox buttonContainer = new HBox();
buttonContainer.setAlignment(Pos.BASELINE_RIGHT);
buttonContainer.getChildren().add(completionButton);
editableDisplayView.getChildren().addAll(editableDisplay, buttonContainer);
}
// Wires the toggle that swaps container row 1 between viewer and editor.
// A WeakReference avoids the handler pinning the button (and popup) in memory.
private void setupToggleButton(){
modeButton = new ToggleButton();
modeButton.setText(EDIT_BTN_TXT);
modeButton.getStyleClass().addAll("button-github-octicon", "borderless-toggle-button");
WeakReference<ToggleButton> btnRef = new WeakReference<>(modeButton);
modeButton.setOnAction((ActionEvent e) -> {
// Index 1 is the currently shown view (viewer or editor); replace it.
container.getChildren().remove(1);
if(btnRef.get().isSelected()){
btnRef.get().setText(BACK_BTN_TXT);
container.getChildren().add(editableDisplayView);
}else{
btnRef.get().setText(EDIT_BTN_TXT);
// Re-render whatever was typed in the editor before showing the viewer.
markupDisplay.getEngine().loadContent(getDisplayedText());
container.getChildren().add(markupDisplay);
}
});
}
// Wires the completion button: run the optional callback, then close the popup.
private void setupCompleteButton(){
completionButton = new Button();
completionButton.setText(buttonText);
WeakReference<EditableMarkupPopup> selfRef = new WeakReference<>(this);
completionButton.setOnAction((ActionEvent e) -> {
EditableMarkupPopup self = selfRef.get();
if(self != null){
if(editModeCompletion != null){
editModeCompletion.run();
}
self.close();
}
});
}
/** Registers the callback to run when the completion button is pressed. */
public void setEditModeCompletion(Runnable completion){
editModeCompletion = completion;
}
}
| src/ui/EditableMarkupPopup.java | package ui;
import java.lang.ref.WeakReference;
import javafx.event.ActionEvent;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.web.HTMLEditor;
import javafx.scene.web.WebView;
import javafx.stage.Modality;
import javafx.stage.Stage;
/**
 * Modal popup that shows rendered HTML markup and lets the user toggle into an
 * HTMLEditor to edit it. A caller-supplied completion callback runs when the
 * user confirms via the completion button, after which the popup closes.
 */
public class EditableMarkupPopup extends Stage{
// Octicon glyphs used on the view/edit toggle button.
protected static final String EDIT_BTN_TXT = "\uf058";
protected static final String BACK_BTN_TXT = " \uf0a4 ";
// Read-only rendered view of the markup.
private WebView markupDisplay;
// Edit mode: editor plus its confirm-button row.
private VBox editableDisplayView;
private HTMLEditor editableDisplay;
// Toggles between the rendered view and the editor.
private ToggleButton modeButton;
// Root layout: row 0 is the toggle-button bar, row 1 is the active view.
private VBox container;
// Callback invoked when the completion button is pressed; may be null.
private Runnable editModeCompletion;
private Button completionButton;
// Caption for the completion button, fixed at construction time.
private final String buttonText;
/**
 * Builds the popup as an application-modal stage.
 *
 * @param buttonText caption shown on the completion button in edit mode
 */
public EditableMarkupPopup(String buttonText){
this.buttonText = buttonText;
setupContents();
Scene scene = new Scene(container);
this.setScene(scene);
this.initModality(Modality.APPLICATION_MODAL);
this.initOwner(null);
}
/** Returns the current (possibly edited) HTML text from the editor. */
public String getDisplayedText(){
return editableDisplay.getHtmlText();
}
/** Loads the given markup into both the rendered view and the editor. */
public void setDisplayedText(String markup){
markupDisplay.getEngine().loadContent(markup);
editableDisplay.setHtmlText(markup);
}
// Assembles the root layout: toggle-button bar on top, rendered view below.
private void setupContents(){
container = new VBox();
setupToggleButton();
setupMarkupDisplay();
setupEditableDisplayView();
HBox buttonContainer = new HBox();
buttonContainer.setAlignment(Pos.BASELINE_RIGHT);
buttonContainer.getChildren().add(modeButton);
container.getChildren().addAll(buttonContainer, markupDisplay);
}
// Creates the read-only WebView used for the rendered markup.
private void setupMarkupDisplay(){
markupDisplay = new WebView();
}
// Builds the edit-mode pane: the HTML editor with a right-aligned confirm button.
private void setupEditableDisplayView(){
editableDisplayView = new VBox();
editableDisplay = new HTMLEditor();
setupCompleteButton();
HBox buttonContainer = new HBox();
buttonContainer.setAlignment(Pos.BASELINE_RIGHT);
buttonContainer.getChildren().add(completionButton);
editableDisplayView.getChildren().addAll(editableDisplay, buttonContainer);
}
// Wires the toggle that swaps container row 1 between viewer and editor.
// A WeakReference avoids the handler pinning the button (and popup) in memory.
private void setupToggleButton(){
modeButton = new ToggleButton();
modeButton.setText(EDIT_BTN_TXT);
modeButton.getStyleClass().addAll("button-github-octicon", "borderless-toggle-button");
WeakReference<ToggleButton> btnRef = new WeakReference<>(modeButton);
modeButton.setOnAction((ActionEvent e) -> {
// Index 1 is the currently shown view (viewer or editor); replace it.
container.getChildren().remove(1);
if(btnRef.get().isSelected()){
btnRef.get().setText(BACK_BTN_TXT);
container.getChildren().add(editableDisplayView);
}else{
btnRef.get().setText(EDIT_BTN_TXT);
// Re-render whatever was typed in the editor before showing the viewer.
markupDisplay.getEngine().loadContent(getDisplayedText());
container.getChildren().add(markupDisplay);
}
});
}
// Wires the completion button: run the optional callback, then close the popup.
private void setupCompleteButton(){
completionButton = new Button();
completionButton.setText(buttonText);
WeakReference<EditableMarkupPopup> selfRef = new WeakReference<>(this);
completionButton.setOnAction((ActionEvent e) -> {
EditableMarkupPopup self = selfRef.get();
if(self != null){
if(editModeCompletion != null){
editModeCompletion.run();
}
self.close();
}
});
}
/** Registers the callback to run when the completion button is pressed. */
public void setEditModeCompletion(Runnable completion){
editModeCompletion = completion;
}
}
| load required css
| src/ui/EditableMarkupPopup.java | load required css |
|
Java | apache-2.0 | cc0c541ab87970bf31be04c177c016fe16274628 | 0 | apache/tapestry-5,apache/tapestry-5,apache/tapestry-5,apache/tapestry-5,apache/tapestry-5 | // Copyright 2007, 2008 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.corelib.components;
import static org.easymock.EasyMock.isA;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.anyObject;
import org.apache.tapestry5.ComponentResources;
import org.apache.tapestry5.PropertyOverrides;
import org.apache.tapestry5.beaneditor.BeanModel;
import org.apache.tapestry5.integration.app1.data.RegistrationData;
import org.apache.tapestry5.internal.BeanValidationContext;
import org.apache.tapestry5.internal.BeanValidationContextImpl;
import org.apache.tapestry5.ioc.Location;
import org.apache.tapestry5.ioc.Messages;
import org.apache.tapestry5.ioc.internal.util.TapestryException;
import org.apache.tapestry5.services.BeanEditContext;
import org.apache.tapestry5.services.BeanModelSource;
import org.apache.tapestry5.services.Environment;
import org.apache.tapestry5.test.TapestryTestCase;
import org.easymock.EasyMock;
import org.easymock.IArgumentMatcher;
import org.testng.annotations.Test;
import java.lang.annotation.Annotation;
/**
 * Unit tests for the BeanEditor component, driving it against mocked
 * Tapestry services (EasyMock expect/replay/verify style).
 */
public class BeanEditorTest extends TapestryTestCase
{
// doPrepare() should instantiate the edited bean via the model when no
// instance is bound, and expose it through getObject().
@Test
public void object_created_as_needed()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
RegistrationData data = new RegistrationData();
Messages messages = mockMessages();
PropertyOverrides overrides = mockPropertyOverrides();
Environment env = EasyMock.createNiceMock(Environment.class);
train_getBoundType(resources, "object", RegistrationData.class);
train_createEditModel(source, RegistrationData.class, messages, model);
train_getOverrideMessages(overrides, messages);
expect(model.newInstance()).andReturn(data);
replay();
EasyMock.replay(env);
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.doPrepare();
assertSame(component.getObject(), data);
verify();
}
// A failure while instantiating the bean should surface as a
// TapestryException carrying the component's location and a clear message.
@Test
public void object_can_not_be_instantiated()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
Location l = mockLocation();
Throwable exception = new RuntimeException("Fall down go boom.");
PropertyOverrides overrides = mockPropertyOverrides();
Messages messages = mockMessages();
Environment env = EasyMock.createNiceMock(Environment.class);
train_getOverrideMessages(overrides, messages);
train_getBoundType(resources, "object", Runnable.class);
train_createEditModel(source, Runnable.class, messages, model);
expect(model.newInstance()).andThrow(exception);
train_getCompleteId(resources, "Foo.bar");
train_getLocation(resources, l);
expect(model.getBeanType()).andReturn(Runnable.class);
replay();
EasyMock.replay(env);
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
try
{
component.doPrepare();
unreachable();
}
catch (TapestryException ex)
{
assertMessageContains(
ex,
"Exception instantiating instance of java.lang.Runnable (for component \'Foo.bar\'):");
assertSame(ex.getLocation(), l);
}
verify();
}
// EasyMock argument matcher: accepts any BeanEditContext whose bean class
// is RegistrationData. Returns null per the EasyMock matcher convention.
private static BeanEditContext contextEq()
{
EasyMock.reportMatcher(new IArgumentMatcher()
{
public void appendTo(StringBuffer buf)
{
buf.append("BeanEditContextEq(RegistrationData.class)");
}
public boolean matches(Object argument)
{
return (argument instanceof BeanEditContext) &&
((BeanEditContext) argument).getBeanClass() == RegistrationData.class;
}
});
return null;
}
// doPrepare() should push both a BeanEditContext and a BeanValidationContext
// onto the environment.
@Test
public void beaneditcontext_pushed_to_environment()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
Environment env = mockEnvironment();
RegistrationData data = new RegistrationData();
Messages messages = mockMessages();
PropertyOverrides overrides = mockPropertyOverrides();
BeanValidationContext beanValidationContext = newMock(BeanValidationContext.class);
train_getBoundType(resources, "object", RegistrationData.class);
train_createEditModel(source, RegistrationData.class, messages, model);
train_getOverrideMessages(overrides, messages);
expect(model.newInstance()).andReturn(data);
expect(model.getBeanType()).andReturn(RegistrationData.class);
BeanEditContext ctxt = new BeanEditContext()
{
public Class<?> getBeanClass()
{
return RegistrationData.class;
}
public <T extends Annotation> T getAnnotation(Class<T> type)
{
return null;
}
};
expect(env.push(eq(BeanEditContext.class), contextEq())).andReturn(ctxt);
expect(env.push(eq(BeanValidationContext.class), isA(BeanValidationContext.class)))
.andReturn(beanValidationContext);
replay();
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.doPrepare();
verify();
}
// doPrepare() should always push a *fresh* BeanValidationContext (no peek/pop
// of a previous one is expected).
@Test
public void refresh_bean_validation_context()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
Environment env = mockEnvironment();
RegistrationData data = new RegistrationData();
Messages messages = mockMessages();
PropertyOverrides overrides = mockPropertyOverrides();
BeanValidationContext beanValidationContext = newMock(BeanValidationContext.class);
train_getBoundType(resources, "object", RegistrationData.class);
train_createEditModel(source, RegistrationData.class, messages, model);
train_getOverrideMessages(overrides, messages);
expect(model.newInstance()).andReturn(data);
expect(env.push(eq(BeanValidationContext.class), isA(BeanValidationContext.class))).andReturn(beanValidationContext);
expect(model.getBeanType()).andReturn(RegistrationData.class);
BeanEditContext ctxt = new BeanEditContext()
{
public Class<?> getBeanClass()
{
return RegistrationData.class;
}
public <T extends Annotation> T getAnnotation(Class<T> type)
{
return null;
}
};
expect(env.push(eq(BeanEditContext.class), contextEq())).andReturn(ctxt);
replay();
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.doPrepare();
verify();
}
// cleanupEnvironment() should pop both contexts pushed during prepare.
@Test
public void beaneditcontext_popped_from_environment()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
Environment env = mockEnvironment();
PropertyOverrides overrides = mockPropertyOverrides();
expect(env.pop(BeanEditContext.class)).andReturn(null);
expect(env.pop(BeanValidationContext.class)).andReturn(null);
replay();
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.cleanupEnvironment();
verify();
}
}
| tapestry-core/src/test/java/org/apache/tapestry5/corelib/components/BeanEditorTest.java | // Copyright 2007, 2008 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.corelib.components;
import static org.easymock.EasyMock.isA;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.anyObject;
import org.apache.tapestry5.ComponentResources;
import org.apache.tapestry5.PropertyOverrides;
import org.apache.tapestry5.beaneditor.BeanModel;
import org.apache.tapestry5.integration.app1.data.RegistrationData;
import org.apache.tapestry5.internal.BeanValidationContext;
import org.apache.tapestry5.internal.BeanValidationContextImpl;
import org.apache.tapestry5.ioc.Location;
import org.apache.tapestry5.ioc.Messages;
import org.apache.tapestry5.ioc.internal.util.TapestryException;
import org.apache.tapestry5.services.BeanEditContext;
import org.apache.tapestry5.services.BeanModelSource;
import org.apache.tapestry5.services.Environment;
import org.apache.tapestry5.test.TapestryTestCase;
import org.easymock.EasyMock;
import org.easymock.IArgumentMatcher;
import org.testng.annotations.Test;
import java.lang.annotation.Annotation;
/**
 * Unit tests for the BeanEditor component, driving it against mocked
 * Tapestry services (EasyMock expect/replay/verify style).
 */
public class BeanEditorTest extends TapestryTestCase
{
// doPrepare() should instantiate the edited bean via the model when no
// instance is bound, and expose it through getObject().
@Test
public void object_created_as_needed()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
RegistrationData data = new RegistrationData();
Messages messages = mockMessages();
PropertyOverrides overrides = mockPropertyOverrides();
Environment env = EasyMock.createNiceMock(Environment.class);
train_getBoundType(resources, "object", RegistrationData.class);
train_createEditModel(source, RegistrationData.class, messages, model);
train_getOverrideMessages(overrides, messages);
expect(model.newInstance()).andReturn(data);
replay();
EasyMock.replay(env);
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.doPrepare();
assertSame(component.getObject(), data);
verify();
}
// A failure while instantiating the bean should surface as a
// TapestryException carrying the component's location and a clear message.
@Test
public void object_can_not_be_instantiated()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
Location l = mockLocation();
Throwable exception = new RuntimeException("Fall down go boom.");
PropertyOverrides overrides = mockPropertyOverrides();
Messages messages = mockMessages();
Environment env = EasyMock.createNiceMock(Environment.class);
train_getOverrideMessages(overrides, messages);
train_getBoundType(resources, "object", Runnable.class);
train_createEditModel(source, Runnable.class, messages, model);
expect(model.newInstance()).andThrow(exception);
train_getCompleteId(resources, "Foo.bar");
train_getLocation(resources, l);
expect(model.getBeanType()).andReturn(Runnable.class);
replay();
EasyMock.replay(env);
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
try
{
component.doPrepare();
unreachable();
}
catch (TapestryException ex)
{
assertMessageContains(
ex,
"Exception instantiating instance of java.lang.Runnable (for component \'Foo.bar\'):");
assertSame(ex.getLocation(), l);
}
verify();
}
// EasyMock argument matcher: accepts any BeanEditContext whose bean class
// is RegistrationData. Returns null per the EasyMock matcher convention.
private static BeanEditContext contextEq()
{
EasyMock.reportMatcher(new IArgumentMatcher()
{
public void appendTo(StringBuffer buf)
{
buf.append("BeanEditContextEq(RegistrationData.class)");
}
public boolean matches(Object argument)
{
return (argument instanceof BeanEditContext) &&
((BeanEditContext) argument).getBeanClass() == RegistrationData.class;
}
});
return null;
}
// doPrepare() should push both a BeanEditContext and a BeanValidationContext
// onto the environment.
@Test
public void beaneditcontext_pushed_to_environment()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
Environment env = mockEnvironment();
RegistrationData data = new RegistrationData();
Messages messages = mockMessages();
PropertyOverrides overrides = mockPropertyOverrides();
train_getBoundType(resources, "object", RegistrationData.class);
train_createEditModel(source, RegistrationData.class, messages, model);
train_getOverrideMessages(overrides, messages);
expect(model.newInstance()).andReturn(data);
expect(model.getBeanType()).andReturn(RegistrationData.class);
BeanEditContext ctxt = new BeanEditContext()
{
public Class<?> getBeanClass()
{
return RegistrationData.class;
}
public <T extends Annotation> T getAnnotation(Class<T> type)
{
return null;
}
};
expect(env.push(eq(BeanEditContext.class), contextEq())).andReturn(ctxt);
expect(env.push(eq(BeanValidationContext.class), isA(BeanValidationContext.class)))
.andReturn(null);
replay();
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.doPrepare();
verify();
}
// When a BeanValidationContext is already present, doPrepare() should pop it
// and push a fresh one (peek/pop/push expectations model the replacement).
@Test
public void refresh_bean_validation_context()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
BeanModel model = mockBeanModel();
Environment env = mockEnvironment();
RegistrationData data = new RegistrationData();
Messages messages = mockMessages();
PropertyOverrides overrides = mockPropertyOverrides();
BeanValidationContext beanValidationContext = newMock(BeanValidationContext.class);
train_getBoundType(resources, "object", RegistrationData.class);
train_createEditModel(source, RegistrationData.class, messages, model);
train_getOverrideMessages(overrides, messages);
expect(model.newInstance()).andReturn(data);
expect(env.peek(eq(BeanValidationContext.class))).andReturn(beanValidationContext);
expect(env.pop(eq(BeanValidationContext.class))).andReturn(beanValidationContext);
expect(env.push(eq(BeanValidationContext.class), isA(BeanValidationContext.class))).andReturn(beanValidationContext);
expect(model.getBeanType()).andReturn(RegistrationData.class);
BeanEditContext ctxt = new BeanEditContext()
{
public Class<?> getBeanClass()
{
return RegistrationData.class;
}
public <T extends Annotation> T getAnnotation(Class<T> type)
{
return null;
}
};
expect(env.push(eq(BeanEditContext.class), contextEq())).andReturn(ctxt);
replay();
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.doPrepare();
verify();
}
// cleanupEnvironment() should pop both contexts pushed during prepare.
@Test
public void beaneditcontext_popped_from_environment()
{
ComponentResources resources = mockComponentResources();
BeanModelSource source = mockBeanModelSource();
Environment env = mockEnvironment();
PropertyOverrides overrides = mockPropertyOverrides();
expect(env.pop(BeanEditContext.class)).andReturn(null);
expect(env.pop(BeanValidationContext.class)).andReturn(null);
replay();
BeanEditor component = new BeanEditor();
component.inject(resources, overrides, source, env);
component.cleanupEnvironment();
verify();
}
}
| RESOLVED - TAP5-2101: BeanEditor should always provide a new
BeanValidationContext (JSR-303)
- fix failing test
| tapestry-core/src/test/java/org/apache/tapestry5/corelib/components/BeanEditorTest.java | RESOLVED - TAP5-2101: BeanEditor should always provide a new BeanValidationContext (JSR-303) - fix failing test |
|
Java | apache-2.0 | 691807c400901a4272d9e687cf3661bd87485b9d | 0 | blunden/haveibeenpwned | /*
* Copyright (C) 2014 Bjrn Lundn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.blunden.haveibeenpwned;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import org.json.JSONException;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.DialogInterface.OnClickListener;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity extends Activity {
private static final String TAG = "HaveIBeenPwned";
private static final int ABOUT_ID = Menu.FIRST;
private static final int CLEAR_ALL_ID = Menu.FIRST + 1;
private static String aboutMessage = null;
private AlertDialog mAboutDialog;
private SharedPreferences mPreferences;
private static HashMap<String, String> siteNames = null;
private static HashMap<String, String> siteDescriptions = null;
private static ArrayDeque<String> searchHistory = null;
private EditText searchInputField;
private ImageButton searchButton;
/**
 * Activity entry point: wires up the search UI, shows the one-time help card,
 * and prepares the static site-name/description lookup tables.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Explicitly specify the preference file to load instead of the default to actually make it read it properly
mPreferences = getApplicationContext().getSharedPreferences("preferences", Context.MODE_PRIVATE);
// Show the introductory help card only on the very first launch.
if(isFirstLaunch()) {
displayHelpCard();
storeFirstLaunch();
}
prepareAboutDialog();
populateSiteData();
// Small bounded history of recent queries; newest entry is at the tail.
searchHistory = new ArrayDeque<String>(4);
searchInputField = (EditText) findViewById(R.id.input_search);
// Trigger the search from the keyboard's "search" action key...
searchInputField.setOnEditorActionListener(new EditText.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (actionId == EditorInfo.IME_ACTION_SEARCH) {
performSearch();
return true;
}
return false;
}
});
// ...as well as from the dedicated search button.
searchButton = (ImageButton) findViewById(R.id.button_search);
searchButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
performSearch();
}
});
}
/**
 * Reads the account from the search field, records it in the search history,
 * clears the field and kicks off the haveibeenpwned lookup on a background
 * AsyncTask, showing a spinner and a toast in the meantime.
 */
private void performSearch() {
    String account = searchInputField.getText().toString().trim();
    // Only remember non-empty queries. The original condition
    // (!account.equals("") || account == null) checked for null *after*
    // dereferencing and used the wrong connective; this is the safe form.
    if (account != null && !account.isEmpty()) {
        searchHistory.add(account);
    }
    Log.d(TAG, "Searching for account: " + account);
    // Clear the search field
    searchInputField.setText("");
    showSpinner();
    Toast.makeText(getBaseContext(), getString(R.string.toast_search), Toast.LENGTH_SHORT).show();
    // Perform the search using the AsyncTask
    new PerformSearchTask().execute(account);
}
/**
 * Adds a swipe-to-dismiss result card for one breached site.
 *
 * @param site            site identifier as returned by the API; mapped to a
 *                        prettier name/description when one is known
 * @param restoredAccount pre-formatted account line when restoring after a
 *                        configuration change, or null to label the card with
 *                        the most recent search query
 */
private void displayOutput(String site, String restoredAccount) {
// Get a reference to the layout where the card will be displayed
final LinearLayout layout = (LinearLayout) findViewById(R.id.now_layout);
// Create the View for the card
final CardView card = new CardView(this);
// Specify layout parameters to be applied
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(0, 20, 0, 0);
// Set the internal state of the card
card.setSite(site);
// Show the prettier string if available
if(siteNames.containsKey(site)) {
card.setSiteHeaderText(siteNames.get(site));
} else {
card.setSiteHeaderText(site);
}
// Check if account is specified or pick the most recent from the search history if not
if(restoredAccount == null) {
if(!searchHistory.isEmpty()) {
card.setSiteAccountText("Compromised: " + searchHistory.peekLast());
}
} else {
card.setSiteAccountText(restoredAccount);
}
// Fall back to a generic description for sites without a known write-up.
if(siteDescriptions.containsKey(site)) {
card.setSiteDescriptionText(siteDescriptions.get(site));
} else {
card.setSiteDescriptionText(getString(R.string.card_description_unavailable));
}
card.setLayoutParams(lp);
// Create the swipe-to-dismiss touch listener.
card.setOnTouchListener(new SwipeDismissTouchListener(
card,
null,
new SwipeDismissTouchListener.DismissCallbacks() {
@Override
public boolean canDismiss(Object token) {
return true;
}
@Override
public void onDismiss(View view, Object token) {
layout.removeView(card);
}
}));
layout.addView(card);
}
/**
 * Adds the one-time introductory help card (shown on first launch) with the
 * same swipe-to-dismiss behaviour as the result cards.
 */
private void displayHelpCard() {
// Get a reference to the layout where the card will be displayed
final LinearLayout layout = (LinearLayout) findViewById(R.id.now_layout);
// Create the View for the card
final HelpCardView card = new HelpCardView(this);
// Specify layout parameters to be applied
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(0, 20, 0, 0);
card.setHeaderText(getString(R.string.card_title_help));
card.setDescriptionText(getString(R.string.card_description_help));
card.setDismissText(getString(R.string.card_swipe_dismiss));
card.setLayoutParams(lp);
// Create the swipe-to-dismiss touch listener.
card.setOnTouchListener(new SwipeDismissTouchListener(
card,
null,
new SwipeDismissTouchListener.DismissCallbacks() {
@Override
public boolean canDismiss(Object token) {
return true;
}
@Override
public void onDismiss(View view, Object token) {
layout.removeView(card);
}
}));
layout.addView(card);
}
/** Hides the search field and shows the progress spinner in its place. */
private void showSpinner() {
    final View spinner = findViewById(R.id.search_spinner);
    searchInputField.setVisibility(View.INVISIBLE);
    spinner.setVisibility(View.VISIBLE);
}
/** Removes the progress spinner and brings the search field back. */
private void hideSpinner() {
    final View spinner = findViewById(R.id.search_spinner);
    spinner.setVisibility(View.GONE);
    searchInputField.setVisibility(View.VISIBLE);
}
/**
 * Fills the static lookup tables that map the site identifiers returned by
 * the haveibeenpwned API to human-readable names and breach descriptions.
 * Sites missing from these maps fall back to the raw identifier and a generic
 * "description unavailable" text in displayOutput().
 */
private void populateSiteData() {
    // Presize so the default 0.75 load factor never triggers a rehash:
    // capacity 16 holds up to 12 entries (the old capacity of 9 resized at 6).
    // Bump this when new sites are added to the service.
    siteNames = new HashMap<String, String>(16);
    siteNames.put("Adobe", getString(R.string.card_title_adobe));
    siteNames.put("BattlefieldHeroes", getString(R.string.card_title_battlefield_heroes));
    siteNames.put("Gawker", getString(R.string.card_title_gawker));
    siteNames.put("PixelFederation", getString(R.string.card_title_pixel_federation));
    siteNames.put("Snapchat", getString(R.string.card_title_snapchat));
    siteNames.put("Sony", getString(R.string.card_title_sony));
    siteNames.put("Stratfor", getString(R.string.card_title_stratfor));
    siteNames.put("Vodafone", getString(R.string.card_title_vodafone));
    siteNames.put("Yahoo", getString(R.string.card_title_yahoo));

    siteDescriptions = new HashMap<String, String>(16);
    siteDescriptions.put("Adobe", getString(R.string.card_description_adobe));
    siteDescriptions.put("BattlefieldHeroes", getString(R.string.card_description_battlefield_heroes));
    siteDescriptions.put("Gawker", getString(R.string.card_description_gawker));
    siteDescriptions.put("PixelFederation", getString(R.string.card_description_pixel_federation));
    siteDescriptions.put("Snapchat", getString(R.string.card_description_snapchat));
    siteDescriptions.put("Sony", getString(R.string.card_description_sony));
    siteDescriptions.put("Stratfor", getString(R.string.card_description_stratfor));
    siteDescriptions.put("Vodafone", getString(R.string.card_description_vodafone));
    siteDescriptions.put("Yahoo", getString(R.string.card_description_yahoo));
}
/**
 * Lazily loads the about-message text and builds the (reusable) about dialog
 * with a single OK button that just dismisses it.
 */
private void prepareAboutDialog() {
// Cache the message across invocations; it never changes at runtime.
if (aboutMessage == null) {
aboutMessage = getString(R.string.about_message);
}
mAboutDialog = new AlertDialog.Builder(this)
.setTitle(R.string.menu_about)
.setMessage(aboutMessage)
.setNeutralButton(R.string.ok, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
})
.create();
}
/**
 * Removes every result and help card from the card container, leaving any
 * other child views untouched.
 */
private void clearAllCards() {
    ViewGroup group = (ViewGroup) findViewById(R.id.now_layout);
    // Iterate backwards so removing a child does not shift the indices of the
    // children not yet inspected — one O(n) pass instead of the previous
    // restart-the-scan-after-every-removal O(n^2) loop.
    for (int i = group.getChildCount() - 1; i >= 0; i--) {
        View view = group.getChildAt(i);
        if (view instanceof CardView || view instanceof HelpCardView) {
            group.removeView(view);
        }
    }
}
private boolean isFirstLaunch() {
return mPreferences.getBoolean("firstLaunch", true);
}
private void storeFirstLaunch(){
SharedPreferences.Editor editor = mPreferences.edit();
editor.putBoolean("firstLaunch", false);
editor.apply();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// Store all formatted card strings to be able to restore on configuration change
ArrayList<String> savedSiteStrings = new ArrayList<String>();
ArrayList<String> savedAccountStrings = new ArrayList<String>();
boolean firstLaunch = false;
ViewGroup group = (ViewGroup) findViewById(R.id.now_layout);
for (int i = 0, count = group.getChildCount(); i < count; ++i) {
View view = group.getChildAt(i);
if (view instanceof CardView) {
savedSiteStrings.add(((CardView)view).getSite());
savedAccountStrings.add(((CardView)view).getSiteAccountView().getText().toString());
}
if (view instanceof HelpCardView) {
firstLaunch = true;
}
}
outState.putStringArrayList("savedSiteText", savedSiteStrings);
outState.putStringArrayList("savedAccountText", savedAccountStrings);
outState.putString("savedSearchInput", searchInputField.getText().toString());
outState.putBoolean("firstLaunch", firstLaunch);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
// Retrieve saved strings
ArrayList<String> savedSiteStrings = savedInstanceState.getStringArrayList("savedSiteText");
ArrayList<String> savedAccountStrings = savedInstanceState.getStringArrayList("savedAccountText");
boolean firstLaunch = savedInstanceState.getBoolean("firstLaunch");
// Restore saved user search field input
searchInputField.setText(savedInstanceState.getString("savedSearchInput"));
// Add the help card back
if(firstLaunch) {
displayHelpCard();
}
// Add the cards back
if(savedSiteStrings != null && savedAccountStrings != null) {
for(int i = 0; i < Math.max(savedSiteStrings.size(), savedAccountStrings.size()); i++) {
displayOutput(savedSiteStrings.get(i), savedAccountStrings.get(i));
}
}
super.onRestoreInstanceState(savedInstanceState);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
menu.add(0, ABOUT_ID, 0, R.string.menu_about)
.setIcon(android.R.drawable.ic_menu_info_details).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
menu.add(0, CLEAR_ALL_ID, 0, R.string.menu_clear_all)
.setIcon(android.R.drawable.ic_menu_close_clear_cancel).setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
return true;
}
@Override
public boolean onMenuItemSelected(int featureId, MenuItem item) {
switch(item.getItemId()) {
case ABOUT_ID:
mAboutDialog.show();
return true;
case CLEAR_ALL_ID:
// Clear the search field
searchInputField.setText("");
// Remove all the cards
clearAllCards();
return true;
}
return super.onMenuItemSelected(featureId, item);
}
private class PerformSearchTask extends AsyncTask<String, Void, ArrayList<String>> {
protected ArrayList<String> doInBackground(String... accounts) {
//Log.d(TAG, "doInBackground account: " + accounts[0]);
HaveIBeenPwnedAPI api = new HaveIBeenPwnedAPI();
ArrayList<String> result = new ArrayList<String>(9);
try {
result = api.query(accounts[0]);
} catch (URISyntaxException e) {
Toast.makeText(getBaseContext(), getString(R.string.error_invalid_uri_syntax), Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (IOException e) {
Toast.makeText(getBaseContext(), getString(R.string.error_invalid_response), Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (JSONException e) {
Toast.makeText(getBaseContext(), getString(R.string.error_json_parsing), Toast.LENGTH_SHORT).show();
e.printStackTrace();
}
return result;
}
protected void onPostExecute(ArrayList<String> result) {
hideSpinner();
if(result == null) {
Toast.makeText(getBaseContext(), getString(R.string.error_result_null), Toast.LENGTH_SHORT).show();
return;
} else if(!result.isEmpty()) {
for(String site : result) {
displayOutput(site, null);
}
}
}
}
}
| src/se/blunden/haveibeenpwned/MainActivity.java | /*
* Copyright (C) 2014 Bjrn Lundn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.blunden.haveibeenpwned;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import org.json.JSONException;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.DialogInterface.OnClickListener;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity extends Activity {
private static final String TAG = "HaveIBeenPwned";
private static final int ABOUT_ID = Menu.FIRST;
private static final int CLEAR_ALL_ID = Menu.FIRST + 1;
private static String aboutMessage = null;
private AlertDialog mAboutDialog;
private SharedPreferences mPreferences;
private static HashMap<String, String> siteNames = null;
private static HashMap<String, String> siteDescriptions = null;
private static ArrayDeque<String> searchHistory = null;
private EditText searchInputField;
private ImageButton searchButton;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Explicitly specify the preference file to load instead of the default to actually make it read it properly
mPreferences = getApplicationContext().getSharedPreferences("preferences", Context.MODE_PRIVATE);
if(isFirstLaunch()) {
displayHelpCard();
storeFirstLaunch();
}
prepareAboutDialog();
populateSiteData();
searchHistory = new ArrayDeque<String>(4);
searchInputField = (EditText) findViewById(R.id.input_search);
searchInputField.setOnEditorActionListener(new EditText.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (actionId == EditorInfo.IME_ACTION_SEARCH) {
performSearch();
return true;
}
return false;
}
});
searchButton = (ImageButton) findViewById(R.id.button_search);
searchButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
performSearch();
}
});
}
private void performSearch() {
String account = searchInputField.getText().toString().trim();
// Add to search history
if(!account.equals("") || account == null) {
searchHistory.add(account);
}
Log.d(TAG, "Searching for account: " + account);
// Clear the search field
searchInputField.setText("");
showSpinner();
Toast.makeText(getBaseContext(), getString(R.string.toast_search), Toast.LENGTH_SHORT).show();
// Perform the search using the AsyncTask
new PerformSearchTask().execute(account);
}
private void displayOutput(String site, String restoredAccount) {
// Get a reference to the layout where the card will be displayed
final LinearLayout layout = (LinearLayout) findViewById(R.id.now_layout);
// Create the View for the card
final CardView card = new CardView(this);
// Specify layout parameters to be applied
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(0, 20, 0, 0);
// Set the internal state of the card
card.setSite(site);
// Show the prettier string if available
if(siteNames.containsKey(site)) {
card.setSiteHeaderText(siteNames.get(site));
} else {
card.setSiteHeaderText(site);
}
// Check if account is specified or pick the most recent from the search history if not
if(restoredAccount == null) {
if(!searchHistory.isEmpty()) {
card.setSiteAccountText("Compromised: " + searchHistory.peekLast());
}
} else {
card.setSiteAccountText(restoredAccount);
}
if(siteDescriptions.containsKey(site)) {
card.setSiteDescriptionText(siteDescriptions.get(site));
} else {
card.setSiteDescriptionText(getString(R.string.card_description_unavailable));
}
card.setLayoutParams(lp);
// Create the swipe-to-dismiss touch listener.
card.setOnTouchListener(new SwipeDismissTouchListener(
card,
null,
new SwipeDismissTouchListener.DismissCallbacks() {
@Override
public boolean canDismiss(Object token) {
return true;
}
@Override
public void onDismiss(View view, Object token) {
layout.removeView(card);
}
}));
layout.addView(card);
}
private void displayHelpCard() {
// Get a reference to the layout where the card will be displayed
final LinearLayout layout = (LinearLayout) findViewById(R.id.now_layout);
// Create the View for the card
final HelpCardView card = new HelpCardView(this);
// Specify layout parameters to be applied
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(0, 20, 0, 0);
card.setHeaderText(getString(R.string.card_title_help));
card.setDescriptionText(getString(R.string.card_description_help));
card.setDismissText(getString(R.string.card_swipe_dismiss));
card.setLayoutParams(lp);
// Create the swipe-to-dismiss touch listener.
card.setOnTouchListener(new SwipeDismissTouchListener(
card,
null,
new SwipeDismissTouchListener.DismissCallbacks() {
@Override
public boolean canDismiss(Object token) {
return true;
}
@Override
public void onDismiss(View view, Object token) {
layout.removeView(card);
}
}));
layout.addView(card);
}
private void showSpinner() {
searchInputField.setVisibility(View.INVISIBLE);
View spinner = findViewById(R.id.search_spinner);
spinner.setVisibility(View.VISIBLE);
}
private void hideSpinner() {
View spinner = findViewById(R.id.search_spinner);
spinner.setVisibility(View.GONE);
searchInputField.setVisibility(View.VISIBLE);
}
private void populateSiteData() {
// Increase initial capacity when new sites are added to the service
siteNames = new HashMap<String, String>(9);
siteNames.put("Adobe", getString(R.string.card_title_adobe));
siteNames.put("BattlefieldHeroes", getString(R.string.card_title_battlefield_heroes));
siteNames.put("Gawker", getString(R.string.card_title_gawker));
siteNames.put("PixelFederation", getString(R.string.card_title_pixel_federation));
siteNames.put("Snapchat", getString(R.string.card_title_snapchat));
siteNames.put("Sony", getString(R.string.card_title_sony));
siteNames.put("Stratfor", getString(R.string.card_title_stratfor));
siteNames.put("Vodafone", getString(R.string.card_title_vodafone));
siteNames.put("Yahoo", getString(R.string.card_title_yahoo));
// Increase initial capacity when new sites are added to the service
siteDescriptions = new HashMap<String, String>(9);
siteDescriptions.put("Adobe", getString(R.string.card_description_adobe));
siteDescriptions.put("BattlefieldHeroes", getString(R.string.card_description_battlefield_heroes));
siteDescriptions.put("Gawker", getString(R.string.card_description_gawker));
siteDescriptions.put("PixelFederation", getString(R.string.card_description_pixel_federation));
siteDescriptions.put("Snapchat", getString(R.string.card_description_snapchat));
siteDescriptions.put("Sony", getString(R.string.card_description_sony));
siteDescriptions.put("Stratfor", getString(R.string.card_description_stratfor));
siteDescriptions.put("Vodafone", getString(R.string.card_description_vodafone));
siteDescriptions.put("Yahoo", getString(R.string.card_description_yahoo));
}
private void prepareAboutDialog() {
if (aboutMessage == null) {
aboutMessage = getString(R.string.about_message);
}
mAboutDialog = new AlertDialog.Builder(this)
.setTitle(R.string.menu_about)
.setMessage(aboutMessage)
.setNeutralButton(R.string.ok, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
})
.create();
}
private void clearAllCards() {
boolean finished = false;
while(!finished) {
ViewGroup group = (ViewGroup) findViewById(R.id.now_layout);
int count = group.getChildCount();
int i;
for (i = 0; i < count; i++) {
View view = group.getChildAt(i);
if (view instanceof CardView || view instanceof HelpCardView) {
group.removeView(view);
break;
}
}
if(i == count) {
finished = true;
}
}
}
private boolean isFirstLaunch() {
return mPreferences.getBoolean("firstLaunch", true);
}
private void storeFirstLaunch(){
SharedPreferences.Editor editor = mPreferences.edit();
editor.putBoolean("firstLaunch", false);
editor.apply();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// Store all formatted card strings to be able to restore on configuration change
ArrayList<String> savedSiteStrings = new ArrayList<String>();
ArrayList<String> savedAccountStrings = new ArrayList<String>();
boolean firstLaunch = false;
ViewGroup group = (ViewGroup) findViewById(R.id.now_layout);
for (int i = 0, count = group.getChildCount(); i < count; ++i) {
View view = group.getChildAt(i);
if (view instanceof CardView) {
savedSiteStrings.add(((CardView)view).getSite());
savedAccountStrings.add(((CardView)view).getSiteAccountView().getText().toString());
}
if (view instanceof HelpCardView) {
firstLaunch = true;
}
}
outState.putStringArrayList("savedSiteText", savedSiteStrings);
outState.putStringArrayList("savedAccountText", savedAccountStrings);
outState.putString("savedSearchInput", searchInputField.getText().toString());
outState.putBoolean("firstLaunch", firstLaunch);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
// Retrieve saved strings
ArrayList<String> savedSiteStrings = savedInstanceState.getStringArrayList("savedSiteText");
ArrayList<String> savedAccountStrings = savedInstanceState.getStringArrayList("savedAccountText");
boolean firstLaunch = savedInstanceState.getBoolean("firstLaunch");
// Restore saved user search field input
searchInputField.setText(savedInstanceState.getString("savedSearchInput"));
// Add the help card back
if(firstLaunch) {
displayHelpCard();
}
// Add the cards back
if(savedSiteStrings != null && savedAccountStrings != null) {
for(int i = 0; i < Math.max(savedSiteStrings.size(), savedAccountStrings.size()); i++) {
displayOutput(savedSiteStrings.get(i), savedAccountStrings.get(i));
}
}
super.onRestoreInstanceState(savedInstanceState);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
menu.add(0, ABOUT_ID, 0, R.string.menu_about)
.setIcon(android.R.drawable.ic_menu_info_details).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
menu.add(0, CLEAR_ALL_ID, 0, R.string.menu_clear_all)
.setIcon(android.R.drawable.ic_menu_close_clear_cancel).setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
return true;
}
@Override
public boolean onMenuItemSelected(int featureId, MenuItem item) {
switch(item.getItemId()) {
case ABOUT_ID:
mAboutDialog.show();
return true;
case CLEAR_ALL_ID:
// Clear the search field
searchInputField.setText("");
// Remove all the cards
clearAllCards();
return true;
}
return super.onMenuItemSelected(featureId, item);
}
private class PerformSearchTask extends AsyncTask<String, Void, ArrayList<String>> {
protected ArrayList<String> doInBackground(String... accounts) {
//Log.d(TAG, "doInBackground account: " + accounts[0]);
HaveIBeenPwnedAPI api = new HaveIBeenPwnedAPI();
ArrayList<String> result = new ArrayList<String>(9);
try {
result = api.query(accounts[0]);
} catch (URISyntaxException e) {
Toast.makeText(getBaseContext(), getString(R.string.error_invalid_uri_syntax), Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (IOException e) {
Toast.makeText(getBaseContext(), getString(R.string.error_invalid_response), Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (JSONException e) {
Toast.makeText(getBaseContext(), getString(R.string.error_json_parsing), Toast.LENGTH_SHORT).show();
e.printStackTrace();
}
return result;
}
protected void onPostExecute(ArrayList<String> result) {
if(result == null) {
Toast.makeText(getBaseContext(), getString(R.string.error_result_null), Toast.LENGTH_SHORT).show();
return;
} else if(!result.isEmpty()) {
for(String site : result) {
displayOutput(site, null);
}
}
hideSpinner();
}
}
}
| Always hide the spinner when search finished | src/se/blunden/haveibeenpwned/MainActivity.java | Always hide the spinner when search finished |
|
Java | apache-2.0 | dc8b4200b72bfd57804d7deaca726740585708e9 | 0 | SAP/cloud-odata-java,SAP/cloud-odata-java | package com.sap.core.odata.api.exception;
/**
* Common checked exception for <code>OData</code> library and base exception for all <code>OData</code> related exceptions.
*/
public class ODataException extends Exception {
private static final long serialVersionUID = 1L;
public ODataException() {
super();
}
public ODataException(String msg) {
super(msg);
}
public ODataException(String msg, Throwable e) {
super(msg, e);
}
public ODataException(Throwable e) {
super(e);
}
/**
* Check whether this exception was caused by a {@link ODataHttpException} exception.
*
* @return <code>true</code> if it was caused by an {@link ODataHttpException}, otherwise <code>false</code>.
*/
public boolean isCausedByHttpException() {
return getHttpExceptionCause() != null;
}
/**
* Search for and return first (from top) {@link ODataHttpException} in cause hierarchy.
* If no {@link ODataHttpException} in cause hierarchy <code>NULL</code> is returned.
*
* @return the first found {@link ODataHttpException} in the cause exception hierarchy.
* Or <code>NULL</code> if no {@link ODataHttpException} is found in cause hierarchy.
*/
public ODataHttpException getHttpExceptionCause() {
return getSpecificCause(ODataHttpException.class);
}
/**
* Check whether this exception was caused by a {@link ODataApplicationException} exception.
*
* @return <code>true</code> if it was caused by an {@link ODataApplicationException}, otherwise <code>false</code>.
*/
public boolean isCausedByApplicationException() {
return getApplicationExceptionCause() != null;
}
/**
* Check whether this exception was caused by a {@link ODataMessageException} exception.
*
* @return <code>true</code> if it was caused by an {@link ODataMessageException}, otherwise <code>false</code>.
*/
public boolean isCausedByMessageException() {
return getMessageExceptionCause() != null;
}
/**
* Search for and return first (from top) {@link ODataMessageException} in cause hierarchy.
* If no {@link ODataMessageException} in cause hierarchy <code>NULL</code> is returned.
*
* @return the first found {@link ODataMessageException} in the cause exception hierarchy.
* Or <code>NULL</code> if no {@link ODataMessageException} is found in cause hierarchy.
*/
public ODataMessageException getMessageExceptionCause() {
return getSpecificCause(ODataMessageException.class);
}
/**
* Search for and return first (from top) {@link ODataApplicationException} in cause hierarchy.
* If no {@link ODataApplicationException} in cause hierarchy <code>NULL</code> is returned.
*
* @return the first found {@link ODataApplicationException} in the cause exception hierarchy.
* Or <code>NULL</code> if no {@link ODataApplicationException} is found in cause hierarchy.
*/
public ODataApplicationException getApplicationExceptionCause() {
return getSpecificCause(ODataApplicationException.class);
}
private <T> T getSpecificCause(Class<T> causeClass) {
Throwable cause = getCause();
while (cause != null) {
if (causeClass.isInstance(cause)) {
return causeClass.cast(cause);
}
cause = cause.getCause();
}
return null;
}
}
| odata-api/src/main/java/com/sap/core/odata/api/exception/ODataException.java | package com.sap.core.odata.api.exception;
/**
* Common checked exception for <code>OData</code> library and base exception for all <code>OData</code> related exceptions.
*/
public class ODataException extends Exception {
private static final long serialVersionUID = 1L;
public ODataException() {
super();
}
public ODataException(String msg) {
super(msg);
}
public ODataException(String msg, Throwable e) {
super(msg, e);
}
public ODataException(Throwable e) {
super(e);
}
/**
* Check whether this exception was caused by a {@link ODataHttpException} exception.
*
* @return <code>true</code> if it was caused by an {@link ODataHttpException}, otherwise <code>false</code>.
*/
public boolean isCausedByHttpException() {
return getHttpExceptionCause() != null;
}
/**
* Search for and return first (from top) {@link ODataHttpException} in cause hierarchy.
* If no {@link ODataHttpException} in cause hierarchy <code>NULL</code> is returned.
*
* @return the first found {@link ODataHttpException} in the cause exception hierarchy.
* Or <code>NULL</code> if no {@link ODataHttpException} is found in cause hierarchy.
*/
public ODataHttpException getHttpExceptionCause() {
return getSpecificCause(ODataHttpException.class);
}
/**
* Check whether this exception was caused by a {@link ODataApplicationException} exception.
*
* @return <code>true</code> if it was caused by an {@link ODataApplicationException}, otherwise <code>false</code>.
*/
public boolean isCausedByApplicationException() {
return getApplicationExceptionCause() != null;
}
/**
* Search for and return first (from top) {@link ODataApplicationException} in cause hierarchy.
* If no {@link ODataApplicationException} in cause hierarchy <code>NULL</code> is returned.
*
* @return the first found {@link ODataApplicationException} in the cause exception hierarchy.
* Or <code>NULL</code> if no {@link ODataApplicationException} is found in cause hierarchy.
*/
public ODataApplicationException getApplicationExceptionCause() {
return getSpecificCause(ODataApplicationException.class);
}
private <T> T getSpecificCause(Class<T> causeClass) {
Throwable cause = getCause();
while (cause != null) {
if (causeClass.isInstance(cause)) {
return causeClass.cast(cause);
}
cause = cause.getCause();
}
return null;
}
public boolean isCausedByMessageException() {
return getMessageExceptionCause() != null;
}
public Exception getMessageExceptionCause() {
return getSpecificCause(ODataMessageException.class);
}
}
| Fixed wrong return type.
Change-Id: I2b7da58425c98915fdb8598752902ad61853098d
| odata-api/src/main/java/com/sap/core/odata/api/exception/ODataException.java | Fixed wrong return type. |
|
Java | apache-2.0 | 167ae1965e3e89102345f32b03a9d0f87d0b548f | 0 | duke2906/traccar,orcoliver/traccar,stalien/traccar_test,5of9/traccar,AnshulJain1985/Roadcast-Tracker,jssenyange/traccar,jon-stumpf/traccar,orcoliver/traccar,renaudallard/traccar,al3x1s/traccar,tananaev/traccar,jssenyange/traccar,ninioe/traccar,jssenyange/traccar,vipien/traccar,ninioe/traccar,AnshulJain1985/Roadcast-Tracker,al3x1s/traccar,vipien/traccar,ninioe/traccar,tsmgeek/traccar,duke2906/traccar,tsmgeek/traccar,joseant/traccar-1,tananaev/traccar,tsmgeek/traccar,joseant/traccar-1,jon-stumpf/traccar,stalien/traccar_test,renaudallard/traccar,orcoliver/traccar,tananaev/traccar,5of9/traccar,jon-stumpf/traccar | /*
* Copyright 2013 Anton Tananaev ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.traccar.protocol;
import java.nio.charset.Charset;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.frame.FrameDecoder;
import org.traccar.helper.ChannelBufferTools;
public class H02FrameDecoder extends FrameDecoder {
private static final int MESSAGE_LENGTH = 32;
@Override
protected Object decode(
ChannelHandlerContext ctx,
Channel channel,
ChannelBuffer buf) throws Exception {
String marker = buf.toString(buf.readerIndex(), 1, Charset.defaultCharset());
while (!marker.equals("*") && !marker.equals("$") && buf.readableBytes() > 0) {
buf.skipBytes(1);
if (buf.readableBytes() > 0) {
marker = buf.toString(buf.readerIndex(), 1, Charset.defaultCharset());
}
}
if (marker.equals("*")) {
// Return text message
Integer index = ChannelBufferTools.find(buf, buf.readerIndex(), buf.readableBytes(), "#");
if (index != null) {
return buf.readBytes(index + 1 - buf.readerIndex());
}
} else if (marker.equals("$")) {
// Return binary message
if (buf.readableBytes() >= MESSAGE_LENGTH) {
return buf.readBytes(MESSAGE_LENGTH);
}
}
return null;
}
}
| src/org/traccar/protocol/H02FrameDecoder.java | /*
* Copyright 2013 Anton Tananaev ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.traccar.protocol;
import java.nio.charset.Charset;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.frame.FrameDecoder;
import org.traccar.helper.ChannelBufferTools;
public class H02FrameDecoder extends FrameDecoder {
private static final int MESSAGE_LENGTH = 32;
@Override
protected Object decode(
ChannelHandlerContext ctx,
Channel channel,
ChannelBuffer buf) throws Exception {
String marker = buf.toString(buf.readerIndex(), 1, Charset.defaultCharset());
while (!marker.equals("*") && !marker.equals("$") && buf.readableBytes() > 0) {
buf.skipBytes(1);
marker = buf.toString(buf.readerIndex(), 1, Charset.defaultCharset());
}
if (marker.equals("*")) {
// Return text message
Integer index = ChannelBufferTools.find(buf, buf.readerIndex(), buf.readableBytes(), "#");
if (index != null) {
return buf.readBytes(index + 1 - buf.readerIndex());
}
} else if (marker.equals("$")) {
// Return binary message
if (buf.readableBytes() >= MESSAGE_LENGTH) {
return buf.readBytes(MESSAGE_LENGTH);
}
}
return null;
}
}
| Fix H02 frame decoder
| src/org/traccar/protocol/H02FrameDecoder.java | Fix H02 frame decoder |
|
Java | apache-2.0 | 845c8fe8e04ec4d94e24893333ece58cf2a61662 | 0 | ibnc/gocd,tomzo/gocd,stevem999/gocd,kierarad/gocd,marques-work/gocd,ketan/gocd,varshavaradarajan/gocd,varshavaradarajan/gocd,marques-work/gocd,jyotisingh/gocd,naveenbhaskar/gocd,ind9/gocd,GaneshSPatil/gocd,ind9/gocd,varshavaradarajan/gocd,bdpiparva/gocd,marques-work/gocd,Skarlso/gocd,Skarlso/gocd,naveenbhaskar/gocd,arvindsv/gocd,ketan/gocd,jyotisingh/gocd,gocd/gocd,ibnc/gocd,gocd/gocd,ind9/gocd,ketan/gocd,naveenbhaskar/gocd,arvindsv/gocd,Skarlso/gocd,varshavaradarajan/gocd,GaneshSPatil/gocd,marques-work/gocd,GaneshSPatil/gocd,stevem999/gocd,tomzo/gocd,GaneshSPatil/gocd,bdpiparva/gocd,naveenbhaskar/gocd,ketan/gocd,stevem999/gocd,tomzo/gocd,ibnc/gocd,ketan/gocd,gocd/gocd,ketan/gocd,varshavaradarajan/gocd,bdpiparva/gocd,ind9/gocd,marques-work/gocd,bdpiparva/gocd,bdpiparva/gocd,naveenbhaskar/gocd,arvindsv/gocd,varshavaradarajan/gocd,stevem999/gocd,kierarad/gocd,stevem999/gocd,tomzo/gocd,gocd/gocd,kierarad/gocd,GaneshSPatil/gocd,tomzo/gocd,arvindsv/gocd,gocd/gocd,kierarad/gocd,gocd/gocd,tomzo/gocd,Skarlso/gocd,jyotisingh/gocd,ibnc/gocd,Skarlso/gocd,marques-work/gocd,kierarad/gocd,jyotisingh/gocd,jyotisingh/gocd,jyotisingh/gocd,Skarlso/gocd,bdpiparva/gocd,ibnc/gocd,ibnc/gocd,arvindsv/gocd,GaneshSPatil/gocd,ind9/gocd,kierarad/gocd,arvindsv/gocd,naveenbhaskar/gocd | /*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.domain.materials.perforce;
import com.thoughtworks.go.config.materials.perforce.P4Material;
import com.thoughtworks.go.helper.P4TestRepo;
import com.thoughtworks.go.util.command.CommandLineException;
import com.thoughtworks.go.util.command.ConsoleResult;
public class P4Fixture {
private P4TestRepo repo;
public void start() {
}
public void setRepo(P4TestRepo repo) {
this.repo = repo;
}
public void stop(P4Client p4) {
stopP4d(p4);
repo.onTearDown();
}
private void stopP4d(P4Client p4) {
try {
p4.admin("stop");
ConsoleResult consoleResult = p4.checkConnection();
while (!consoleResult.failed()) {
try {
//Wait for the server to shutdown
Thread.sleep(100);
} catch (InterruptedException ignored) {
}
}
} catch (CommandLineException expected) {
if (isKnownWindowsError(expected)) {
return;
}
if (expected.getResult().errorAsString().contains("Connection refused")) {
return;
}
throw expected;
}
}
private boolean isKnownWindowsError(CommandLineException expected) {
// Stopping p4d on windows returns the following failure:
return expected.getResult().errorAsString().contains("WSAECONNRESET") || expected.getResult().errorAsString().contains("WSAECONNREFUSED");
}
public String port() {
return repo.serverAndPort();
}
public P4Client createClient() throws Exception {
return repo.createClient();
}
public P4Client createClient(String name, String view) throws Exception {
return repo.createClientWith(name, view);
}
public P4Material material(String view) {
return repo.material(view);
}
public P4Material material(String view, String dest) {
P4Material p4Material = material(view);
p4Material.setFolder(dest);
return p4Material;
}
}
| common/test/unit/com/thoughtworks/go/domain/materials/perforce/P4Fixture.java | /*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.domain.materials.perforce;
import com.thoughtworks.go.config.materials.perforce.P4Material;
import com.thoughtworks.go.helper.P4TestRepo;
import com.thoughtworks.go.util.command.CommandLineException;
import com.thoughtworks.go.util.command.ConsoleResult;
/**
 * Test fixture around a {@link P4TestRepo}: creates clients and materials bound to it
 * and shuts down the backing p4d server on teardown.
 */
public class P4Fixture {
    private P4TestRepo repo;

    public void start() {
    }

    public void setRepo(P4TestRepo repo) {
        this.repo = repo;
    }

    /** Stops the p4d server, then tears down the repository. */
    public void stop(P4Client p4) {
        stopP4d(p4);
        repo.onTearDown();
    }

    /**
     * Issues "p4 admin stop" and waits until the server no longer accepts connections.
     * A refused/reset connection while checking is treated as a successful shutdown.
     */
    private void stopP4d(P4Client p4) {
        try {
            p4.admin("stop");
            // Re-check the connection on every iteration. (Previously the result was fetched
            // once and only slept on, so a non-failed first check could loop forever.)
            ConsoleResult consoleResult = p4.checkConnection();
            while (!consoleResult.failed()) {
                try {
                    //Wait for the server to shutdown
                    Thread.sleep(100);
                } catch (InterruptedException ignored) {
                    // Preserve the interrupt status for callers instead of silently dropping it.
                    Thread.currentThread().interrupt();
                }
                consoleResult = p4.checkConnection();
            }
        } catch (CommandLineException expected) {
            // The server dropping the connection means it is already down - that is success.
            if (isKnownWindowsError(expected)) {
                return;
            }
            if (expected.getResult().errorAsString().contains("Connection refused")) {
                return;
            }
            throw expected;
        }
    }

    /** Returns true for the connection errors p4d emits on Windows while stopping. */
    private boolean isKnownWindowsError(CommandLineException expected) {
        // Stopping p4d on windows returns WSAECONNRESET or WSAECONNREFUSED; both are expected
        // (matches the fixed behavior of this class - see commit "Fix specs on windows").
        String error = expected.getResult().errorAsString();
        return error.contains("WSAECONNRESET") || error.contains("WSAECONNREFUSED");
    }

    /** Returns the server-and-port string of the backing test repository. */
    public String port() {
        return repo.serverAndPort();
    }

    /** Creates a new Perforce client in the backing test repository. */
    public P4Client createClient() throws Exception {
        return repo.createClient();
    }

    /** Creates a new Perforce client with the given name and view mapping. */
    public P4Client createClient(String name, String view) throws Exception {
        return repo.createClientWith(name, view);
    }

    /** Builds a P4 material for the given view, bound to the backing test repository. */
    public P4Material material(String view) {
        return repo.material(view);
    }

    /** Same as {@link #material(String)} but checked out into the given destination folder. */
    public P4Material material(String view, String dest) {
        P4Material p4Material = material(view);
        p4Material.setFolder(dest);
        return p4Material;
    }
}
| Fix specs on windows
| common/test/unit/com/thoughtworks/go/domain/materials/perforce/P4Fixture.java | Fix specs on windows |
|
Java | apache-2.0 | f5264644c164bc1e76b9537ce2b0982fc2dee098 | 0 | cuba-platform/cuba,cuba-platform/cuba,cuba-platform/cuba | /*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.core.app.entitysql;
import com.google.common.base.Preconditions;
import com.haulmont.bali.datastruct.Pair;
import com.haulmont.chile.core.datatypes.Datatype;
import com.haulmont.chile.core.datatypes.impl.EnumClass;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.chile.core.model.MetaPropertyPath;
import com.haulmont.cuba.core.Persistence;
import com.haulmont.cuba.core.entity.BaseEntityInternalAccess;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.entity.annotation.Extends;
import com.haulmont.cuba.core.global.Metadata;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.persistence.*;
import java.lang.reflect.Field;
import java.sql.Time;
import java.text.SimpleDateFormat;
import java.util.*;
import static java.lang.String.format;
import static java.lang.String.valueOf;
/**
 * Generates textual INSERT, UPDATE and SELECT SQL statements for instances of a single
 * persistent entity class, based on its JPA mapping annotations: the tables of the
 * inheritance chain, discriminator columns, embedded attributes and
 * attribute/association overrides.
 * <p>
 * Prototype-scoped: each instance is bound to the one class passed to the constructor;
 * table metadata is collected once in {@link #init()}.
 */
@Component(EntitySqlGenerator.NAME)
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class EntitySqlGenerator {

    public static final String NAME = "cuba_EntitySqlGenerator";

    // Logical key under which a table's primary-key column is registered in fieldToColumnMapping.
    public static final String ID = "id";

    // The doubled '' in each pattern wraps the formatted value in single quotes, so the output
    // is a ready SQL literal. SimpleDateFormat is not thread-safe, but this bean is
    // prototype-scoped, so instances are not expected to be shared across threads.
    protected SimpleDateFormat dateTimeFormat = new SimpleDateFormat("''yyyy-MM-dd HH:mm:ss''");
    protected SimpleDateFormat dateFormat = new SimpleDateFormat("''yyyy-MM-dd''");
    protected SimpleDateFormat timeFormat = new SimpleDateFormat("''HH:mm:ss''");

    protected String insertTemplate = "insert into %s \n(%s) \nvalues (%s);";
    protected String updateTemplate = "update %s \nset %s \nwhere %s%s;";
    protected String selectTemplate = "select %s from %s where %s";

    protected Class clazz;
    protected MetaClass metaClass;

    // Tables of the inheritance chain; collectTableMetadata() inserts at index 0 while walking
    // towards the root, so ancestor tables come first.
    protected List<Table> tables = new LinkedList<>();
    protected String discriminatorValue;

    @Inject
    protected Metadata metadata;

    @Inject
    protected Persistence persistence;

    public EntitySqlGenerator(Class<? extends Entity> clazz) {
        this.clazz = clazz;
    }

    /**
     * Resolves the metaclass and collects table metadata for the whole hierarchy.
     *
     * @throws IllegalStateException if the class is not mapped to any database table
     */
    @PostConstruct
    public void init() {
        metaClass = metadata.getClass(clazz);
        collectTableMetadata(metaClass, new Table(null));
        if (tables.isEmpty()) {
            throw new IllegalStateException(
                    format("Could not generate scripts for class %s, because it's not linked with any database tables.", clazz));
        }
    }

    /**
     * Builds one INSERT statement per mapped table (root table first) for the given entity.
     *
     * @param entity an instance of exactly the class this generator was created for
     */
    public String generateInsertScript(Entity entity) {
        // NOTE(review): clazz.getClass() in the message renders as 'class java.lang.Class';
        // plain clazz was probably intended (same in the other generate* methods below).
        Preconditions.checkArgument(entity.getClass().equals(clazz),
                format("Could not generate insert script for entity with class [%s]. This script generator is for class [%s]",
                        entity.getClass().getName(),
                        clazz.getClass()));
        StringBuilder result = new StringBuilder();
        for (Table table : tables) {
            result.append(table.insert(entity)).append("\n");
        }
        return result.toString();
    }

    /**
     * Builds one UPDATE statement per mapped table for the given entity; the entity's id
     * forms the WHERE clause of each statement.
     */
    public String generateUpdateScript(Entity entity) {
        Preconditions.checkArgument(entity.getClass().equals(clazz),
                format("Could not generate update script for entity with class [%s]. This script generator is for class [%s]",
                        entity.getClass().getName(),
                        clazz.getClass()));
        StringBuilder result = new StringBuilder();
        for (Table table : tables) {
            result.append(table.update(entity)).append("\n");
        }
        return result.toString();
    }

    /**
     * Builds a single SELECT joining all tables of the hierarchy (aliases t0..tN, each child
     * joined to its parent by primary key) and filtering by the entity's id.
     */
    public String generateSelectScript(Entity entity) {
        Preconditions.checkArgument(entity.getClass().equals(clazz),
                format("Could not generate select script for entity with class [%s]. This script generator is for class [%s]",
                        entity.getClass().getName(),
                        clazz.getClass()));
        List<String> columns = new ArrayList<>();
        List<String> tableNames = new ArrayList<>();
        List<String> where = new ArrayList<>();
        String tableAlias = null;
        FieldEntry tableIdColumn = null;
        for (int i = 0; i < tables.size(); i++) {
            Table table = tables.get(i);
            tableIdColumn = table.fieldToColumnMapping.get(ID);
            tableAlias = format("t%s", valueOf(i));
            String parentAlias = format("t%s", valueOf(i - 1));
            tableNames.add(table.name + " " + tableAlias);
            for (FieldEntry fieldEntry : table.fieldToColumnMapping.values()) {
                columns.addAll(convertFieldNames(tableAlias, fieldEntry));
            }
            // Join each descendant table to its parent table by primary key.
            if (table.parent != null) {
                FieldEntry parentIdColumn = table.parent.fieldToColumnMapping.get(ID);
                where.add(format("%s.%s = %s.%s", tableAlias, tableIdColumn.columnName, parentAlias, parentIdColumn.columnName));
            }
        }
        // After the loop, tableAlias/tableIdColumn refer to the last (most derived) table.
        where.addAll(convertWhere(tableAlias, tableIdColumn, entity));
        // convertList joins with ", "; replacing commas turns the list into AND-ed conditions.
        // NOTE(review): this replaces every comma, including any inside a value literal -
        // safe only while id values contain no commas. TODO confirm.
        return format(selectTemplate, convertList(columns), convertList(tableNames),
                convertList(where).replaceAll(",", " and "));
    }

    // Expands a field entry into alias-qualified column names; an embedded entry contributes
    // one column per nested field.
    protected List<String> convertFieldNames(String tableAlias, FieldEntry fieldEntry) {
        List<String> columns = new ArrayList<>();
        if (fieldEntry.isEmbedded) {
            for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
                columns.addAll(convertFieldNames(tableAlias, entry));
            }
        } else {
            columns.add(tableAlias + "." + fieldEntry.columnName);
        }
        return columns;
    }

    // Builds "alias.column = literal" conditions for the entry, recursing into embedded entries.
    protected List<String> convertWhere(String tableAlias, FieldEntry fieldEntry, Entity entity) {
        List<String> where = new ArrayList<>();
        if (fieldEntry.isEmbedded) {
            for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
                where.addAll(convertWhere(tableAlias, entry, entity));
            }
        } else {
            where.add(tableAlias + "." + fieldEntry.columnName + " = " +
                    convertValue(entity, fieldEntry.getFieldName(), entity.getValueEx(fieldEntry.getFieldName())));
        }
        return where;
    }

    /**
     * Converts a Java value into a SQL literal string: entities become their id, EnumClass
     * values their persisted id, dates are formatted according to the property's datatype,
     * strings/uuids/characters are single-quoted (with embedded quotes doubled); anything
     * else falls back to toString(). Returns null for a null value.
     *
     * @throws RuntimeException wrapping any conversion failure
     */
    protected String convertValue(Entity entity, String fieldName, @Nullable Object value) {
        try {
            String valueStr;
            if (value instanceof Entity) {
                value = ((Entity) value).getId();
            } else if (value instanceof EnumClass) {
                value = ((EnumClass) value).getId();
            } else if (value instanceof Enum) {
                // Plain Java enum: read the raw field value from the entity - presumably the
                // value actually stored in the DB column. TODO confirm.
                value = BaseEntityInternalAccess.getValue(entity, fieldName);
            }
            value = persistence.getDbTypeConverter().getSqlObject(value);
            if (value == null) {
                valueStr = null;
            } else if (value instanceof Date) {
                // Pick date/time/timestamp formatting based on the property's datatype, when known.
                MetaPropertyPath propertyPath = metaClass.getPropertyPath(fieldName);
                if (propertyPath != null) {
                    MetaProperty property = propertyPath.getMetaProperty();
                    Datatype datatype = property.getRange().asDatatype();
                    if (datatype.getJavaClass().equals(java.sql.Date.class)) {
                        valueStr = dateFormat.format((Date) value);
                    } else if (datatype.getJavaClass().equals(Time.class)) {
                        valueStr = timeFormat.format((Date) value);
                    } else {
                        valueStr = dateTimeFormat.format((Date) value);
                    }
                } else {
                    valueStr = dateTimeFormat.format((Date) value);
                }
            } else if (value instanceof String
                    || value instanceof UUID
                    || value.getClass().getName().toLowerCase().contains("uuid")
                    || value instanceof Character) {
                if (value instanceof String) {
                    // Escape single quotes by doubling them.
                    value = ((String) value).replaceAll("\'", "''");
                }
                valueStr = format("'%s'", value);
            } else {
                valueStr = value.toString();
            }
            return valueStr;
        } catch (Exception e) {
            throw new RuntimeException(format("An error occurred while converting object [%s] for SQL query", value), e);
        }
    }

    // Renders a list as "a, b, c" by stripping the brackets from List.toString().
    protected String convertList(List<String> strings) {
        String string = strings.toString();
        return string.substring(1, string.length() - 1);
    }

    // Walks the metaclass hierarchy towards the root, creating a Table per @Table-annotated
    // class, chaining child tables to their parents, then collecting per-class field metadata.
    protected void collectTableMetadata(MetaClass metaClass, Table table) {
        Class<?> javaClass = metaClass.getJavaClass();
        javax.persistence.Table annotation = javaClass.getAnnotation(javax.persistence.Table.class);
        MetaClass ancestor = metaClass.getAncestor();
        if (annotation != null && StringUtils.isNotEmpty(annotation.name())) {
            if (table.name == null) {
                table.name = annotation.name();
                tables.add(0, table);
            } else {
                // A @Table found higher in the hierarchy becomes the parent of the current table.
                Table newTable = new Table(annotation.name());
                tables.add(0, newTable);
                table.parent = newTable;
                table = newTable;
            }
        }
        if (ancestor != null) {
            collectTableMetadata(ancestor, table);
        }
        table.collectMetadata(javaClass);
    }

    /**
     * Mapping of one database table: its name, primary-key column, optional discriminator
     * settings and the field-to-column mapping used to render SQL fragments.
     */
    protected class Table {
        protected Table parent;
        protected String name;
        protected String idColumn;
        protected String discriminatorColumn;
        protected DiscriminatorType discriminatorType;
        // Keyed by field name, or by the ID constant for the primary key; preserves insertion order.
        protected Map<String, FieldEntry> fieldToColumnMapping = new LinkedHashMap<>();

        public Table(String name) {
            this.name = name;
        }

        /** Renders an INSERT for this table, prepending the discriminator column if present. */
        public String insert(Entity entity) {
            List<String> columnNames = new ArrayList<>();
            List<String> valuesStr = new ArrayList<>();
            if (discriminatorColumn != null) {
                // entity/fieldName are not needed here: the discriminator is a String/char/Integer,
                // which never takes the Enum or Date branches of convertValue.
                String discriminatorValueStr = convertValue(null, null, discriminatorValue());
                columnNames.add(discriminatorColumn);
                valuesStr.add(discriminatorValueStr);
            }
            for (Map.Entry<String, FieldEntry> entry : fieldToColumnMapping.entrySet()) {
                Pair<List<String>, List<String>> insertStrings = getInsertStrings(entry.getValue(), entity);
                columnNames.addAll(insertStrings.getFirst());
                valuesStr.addAll(insertStrings.getSecond());
            }
            return format(insertTemplate, name, convertList(columnNames), convertList(valuesStr));
        }

        // Returns (column names, value literals) for one field entry, flattening embedded fields.
        protected Pair<List<String>, List<String>> getInsertStrings(FieldEntry fieldEntry, Entity entity) {
            List<String> columnNames = new ArrayList<>();
            List<String> valuesStr = new ArrayList<>();
            String fieldName = fieldEntry.getFieldName();
            if (fieldEntry.isEmbedded) {
                for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
                    Pair<List<String>, List<String>> insertStrings = getInsertStrings(entry, entity);
                    columnNames.addAll(insertStrings.getFirst());
                    valuesStr.addAll(insertStrings.getSecond());
                }
            } else {
                Object value = entity.getValueEx(fieldName);
                columnNames.add(fieldEntry.columnName);
                valuesStr.add(convertValue(entity, fieldName, value));
            }
            return new Pair<>(columnNames, valuesStr);
        }

        /** Renders an UPDATE for this table; the ID entry goes to WHERE, all others to SET. */
        public String update(Entity entity) {
            List<String> valuesStr = new ArrayList<>();
            List<String> whereStr = new ArrayList<>();
            for (Map.Entry<String, FieldEntry> entry : fieldToColumnMapping.entrySet()) {
                Pair<List<String>, List<String>> insertStrings = getUpdateStrings(entry, entity);
                valuesStr.addAll(insertStrings.getFirst());
                whereStr.addAll(insertStrings.getSecond());
            }
            // Third template argument is an empty placeholder for the %s%s pair in updateTemplate.
            return format(updateTemplate, name, convertList(valuesStr), "", convertList(whereStr).replaceAll(",", " and "));
        }

        // Returns (SET fragments, WHERE fragments) for one mapping entry. Entries keyed by ID
        // contribute to WHERE; all others to SET.
        protected Pair<List<String>, List<String>> getUpdateStrings(Map.Entry<String, FieldEntry> mapEntry, Entity entity) {
            List<String> valuesStr = new ArrayList<>();
            List<String> whereStr = new ArrayList<>();
            FieldEntry fieldEntry = mapEntry.getValue();
            String fieldName = fieldEntry.getFieldName();
            if (!ID.equals(mapEntry.getKey())) {
                if (fieldEntry.isEmbedded) {
                    for (Map.Entry<String, FieldEntry> entry : fieldEntry.fieldsMapping.entrySet()) {
                        Pair<List<String>, List<String>> updateStrings = getUpdateStrings(entry, entity);
                        // NOTE(review): getSecond() of a nested non-id entry is empty, so plain
                        // embedded attributes appear to be dropped from SET; getFirst() may have
                        // been intended - TODO confirm against callers before changing.
                        valuesStr.addAll(updateStrings.getSecond());
                    }
                } else {
                    Object value = entity.getValueEx(fieldName);
                    valuesStr.add(format("%s=%s", fieldEntry.columnName, convertValue(entity, fieldName, value)));
                }
            } else {
                if (fieldEntry.isEmbedded) {
                    // Embedded id: nested entries produce SET-style "col=value" fragments
                    // (getFirst), which become the WHERE conditions of this table.
                    for (Map.Entry<String, FieldEntry> entry : fieldEntry.fieldsMapping.entrySet()) {
                        Pair<List<String>, List<String>> updateStrings = getUpdateStrings(entry, entity);
                        whereStr.addAll(updateStrings.getFirst());
                    }
                } else {
                    Object value = entity.getValueEx(fieldName);
                    whereStr.add(format("%s=%s", fieldEntry.columnName, convertValue(entity, fieldName, value)));
                }
            }
            return new Pair<>(valuesStr, whereStr);
        }

        /** Converts the textual discriminator value into the type declared by the column. */
        @Nullable
        protected Object discriminatorValue() {
            if (discriminatorValue == null) {
                return null;
            } else {
                if (discriminatorColumn != null && discriminatorType != null) {
                    switch (discriminatorType) {
                        case CHAR:
                            return discriminatorValue.charAt(0);
                        case INTEGER:
                            return Integer.valueOf(discriminatorValue);
                        case STRING:
                            return discriminatorValue;
                    }
                }
            }
            return null;
        }

        // Reads the class's JPA annotations: primary-key column, discriminator settings and
        // the field-to-column mapping.
        protected void collectMetadata(Class clazz) {
            if (clazz == null) return;
            PrimaryKeyJoinColumn primaryKey = (PrimaryKeyJoinColumn) clazz.getAnnotation(PrimaryKeyJoinColumn.class);
            if (primaryKey != null) {
                idColumn = primaryKey.name();
            } else {
                idColumn = resolveIdColumn();
            }
            fieldToColumnMapping.put(ID, new FieldEntry(ID, idColumn));
            DiscriminatorValue discriminatorValueAnnotation = (DiscriminatorValue) clazz.getAnnotation(DiscriminatorValue.class);
            Extends extendsAnnotation = (Extends) clazz.getAnnotation(Extends.class);
            javax.persistence.Entity entityAnnotation = (javax.persistence.Entity) clazz.getAnnotation(javax.persistence.Entity.class);
            DiscriminatorColumn discriminatorColumn = (DiscriminatorColumn) clazz.getAnnotation(DiscriminatorColumn.class);
            // Discriminator value: an explicit @DiscriminatorValue wins; otherwise the entity
            // name is used for extended entities, string-typed discriminators and joined tables.
            if (discriminatorValueAnnotation != null) {
                discriminatorValue = discriminatorValueAnnotation.value();
            } else if (extendsAnnotation != null && entityAnnotation != null) {
                discriminatorValue = entityAnnotation.name();
            } else if (entityAnnotation != null && discriminatorColumn != null
                    && discriminatorColumn.discriminatorType().equals(DiscriminatorType.STRING)) {
                discriminatorValue = entityAnnotation.name();
            } else if (entityAnnotation != null && primaryKey != null) {
                discriminatorValue = entityAnnotation.name();
            }
            if (discriminatorColumn != null) {
                this.discriminatorColumn = discriminatorColumn.name();
                this.discriminatorType = discriminatorColumn.discriminatorType();
            } else if (discriminatorValue != null && parent == null) {
                // Fall back to the JPA default discriminator column on the root table.
                this.discriminatorColumn = "DTYPE";
                this.discriminatorType = DiscriminatorType.STRING;
            }
            fieldToColumnMapping.putAll(collectFields(clazz));
        }

        // Uses the already-known id column, the parent table's id column, or the default "ID".
        private String resolveIdColumn() {
            if (idColumn != null) {
                return idColumn;
            } else if (parent != null) {
                return parent.resolveIdColumn();
            }
            return ID.toUpperCase();
        }

        // Maps declared fields to columns from @Column/@JoinColumn. Embedded fields recurse and
        // apply @AttributeOverrides/@AssociationOverrides; an @Id field is registered under the
        // ID key and its column becomes this table's id column.
        private Map<String, FieldEntry> collectFields(Class clazz) {
            Map<String, FieldEntry> result = new LinkedHashMap<>();
            for (Field field : clazz.getDeclaredFields()) {
                Embedded embedded = field.getAnnotation(Embedded.class);
                AttributeOverrides attributeOverrides = field.getAnnotation(AttributeOverrides.class);
                AssociationOverrides associationOverrides = field.getAnnotation(AssociationOverrides.class);
                Column columnAnnotation = field.getAnnotation(Column.class);
                JoinColumn joinColumnAnnotation = field.getAnnotation(JoinColumn.class);
                EmbeddedId embeddedIdAnnotation = field.getAnnotation(EmbeddedId.class);
                Id idAnnotation = field.getAnnotation(Id.class);
                if (embedded != null || embeddedIdAnnotation != null) {
                    Class<?> embeddedObjectType = field.getType();
                    Map<String, FieldEntry> embeddedFields = collectFields(embeddedObjectType);
                    if (attributeOverrides != null) {
                        overrideAttributes(attributeOverrides, embeddedFields);
                    }
                    if (associationOverrides != null) {
                        overrideAssociations(associationOverrides, embeddedFields);
                    }
                    result.put(field.getName(), new FieldEntry(field.getName(), embeddedFields));
                } else if (columnAnnotation != null) {
                    FieldEntry entry = new FieldEntry(field.getName(), columnAnnotation.name());
                    if (idAnnotation != null) {
                        idColumn = columnAnnotation.name();
                        result.put(ID, entry);
                    } else {
                        result.put(field.getName(), entry);
                    }
                } else if (joinColumnAnnotation != null) {
                    result.put(field.getName(), new FieldEntry(field.getName(), joinColumnAnnotation.name()));
                }
            }
            return result;
        }

        // Replaces embedded field columns with those declared in @AttributeOverrides.
        private void overrideAttributes(AttributeOverrides overrides, Map<String, FieldEntry> embeddedFields) {
            AttributeOverride[] overriddenAttributes = overrides.value();
            for (AttributeOverride overriddenAttribute : overriddenAttributes) {
                embeddedFields.put(overriddenAttribute.name(), new FieldEntry(overriddenAttribute.name(), overriddenAttribute.column().name()));
            }
        }

        // Replaces embedded association columns with those declared in @AssociationOverrides
        // (only single-join-column associations are handled; others are silently skipped).
        private void overrideAssociations(AssociationOverrides overrides, Map<String, FieldEntry> embeddedFields) {
            AssociationOverride[] overriddenAttributes = overrides.value();
            for (AssociationOverride overriddenAttribute : overriddenAttributes) {
                if (overriddenAttribute.joinColumns().length == 1) {
                    embeddedFields.put(overriddenAttribute.name(), new FieldEntry(overriddenAttribute.name(), overriddenAttribute.joinColumns()[0].name()));
                }
            }
        }
    }

    /**
     * One entry of a field-to-column mapping: either a plain column, or an embedded group
     * of nested entries.
     */
    protected static class FieldEntry {
        protected String fieldName;
        protected boolean isEmbedded;
        protected String columnName;
        protected Map<String, FieldEntry> fieldsMapping;
        // Set on entries nested inside an embedded entry; used to build dotted property paths.
        protected FieldEntry parentField;

        FieldEntry(String fieldName, String columnName) {
            this.fieldName = fieldName;
            this.columnName = columnName;
            this.isEmbedded = false;
        }

        FieldEntry(String fieldName, Map<String, FieldEntry> fieldsMapping) {
            this.fieldName = fieldName;
            this.fieldsMapping = fieldsMapping;
            for (FieldEntry fieldEntry : this.fieldsMapping.values()) {
                fieldEntry.parentField = this;
            }
            this.isEmbedded = true;
        }

        // Dotted property path from the root entity, e.g. "address.city".
        protected String getFieldName() {
            if (parentField != null) {
                return parentField.getFieldName() + "." + fieldName;
            }
            return fieldName;
        }
    }
} | modules/core/src/com/haulmont/cuba/core/app/entitysql/EntitySqlGenerator.java | /*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.core.app.entitysql;
import com.google.common.base.Preconditions;
import com.haulmont.bali.datastruct.Pair;
import com.haulmont.chile.core.datatypes.Datatype;
import com.haulmont.chile.core.datatypes.impl.EnumClass;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.chile.core.model.MetaPropertyPath;
import com.haulmont.cuba.core.Persistence;
import com.haulmont.cuba.core.entity.BaseEntityInternalAccess;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.entity.annotation.Extends;
import com.haulmont.cuba.core.global.Metadata;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.persistence.*;
import java.lang.reflect.Field;
import java.sql.Time;
import java.text.SimpleDateFormat;
import java.util.*;
import static java.lang.String.format;
import static java.lang.String.valueOf;
@Component(EntitySqlGenerator.NAME)
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class EntitySqlGenerator {
public static final String NAME = "cuba_EntitySqlGenerator";
public static final String ID = "id";
protected SimpleDateFormat dateTimeFormat = new SimpleDateFormat("''yyyy-MM-dd HH:mm:ss''");
protected SimpleDateFormat dateFormat = new SimpleDateFormat("''yyyy-MM-dd''");
protected SimpleDateFormat timeFormat = new SimpleDateFormat("''HH:mm:ss''");
protected String insertTemplate = "insert into %s \n(%s) \nvalues (%s);";
protected String updateTemplate = "update %s \nset %s \nwhere %s%s;";
protected String selectTemplate = "select %s from %s where %s";
protected Class clazz;
protected MetaClass metaClass;
protected List<Table> tables = new LinkedList<>();
protected String discriminatorValue;
@Inject
protected Metadata metadata;
@Inject
protected Persistence persistence;
public EntitySqlGenerator(Class<? extends Entity> clazz) {
this.clazz = clazz;
}
@PostConstruct
public void init() {
metaClass = metadata.getClass(clazz);
collectTableMetadata(metaClass, new Table(null));
if (tables.isEmpty()) {
throw new IllegalStateException(
format("Could not generate scripts for class %s, because it's not linked with any database tables.", clazz));
}
}
public String generateInsertScript(Entity entity) {
Preconditions.checkArgument(entity.getClass().equals(clazz),
format("Could not generate insert script for entity with class [%s]. This script generator is for class [%s]",
entity.getClass().getName(),
clazz.getClass()));
StringBuilder result = new StringBuilder();
for (Table table : tables) {
result.append(table.insert(entity)).append("\n");
}
return result.toString();
}
public String generateUpdateScript(Entity entity) {
Preconditions.checkArgument(entity.getClass().equals(clazz),
format("Could not generate update script for entity with class [%s]. This script generator is for class [%s]",
entity.getClass().getName(),
clazz.getClass()));
StringBuilder result = new StringBuilder();
for (Table table : tables) {
result.append(table.update(entity)).append("\n");
}
return result.toString();
}
public String generateSelectScript(Entity entity) {
Preconditions.checkArgument(entity.getClass().equals(clazz),
format("Could not generate select script for entity with class [%s]. This script generator is for class [%s]",
entity.getClass().getName(),
clazz.getClass()));
List<String> columns = new ArrayList<>();
List<String> tableNames = new ArrayList<>();
List<String> where = new ArrayList<>();
String tableAlias = null;
FieldEntry tableIdColumn = null;
for (int i = 0; i < tables.size(); i++) {
Table table = tables.get(i);
tableIdColumn = table.fieldToColumnMapping.get(ID);
tableAlias = format("t%s", valueOf(i));
String parentAlias = format("t%s", valueOf(i - 1));
tableNames.add(table.name + " " + tableAlias);
for (FieldEntry fieldEntry : table.fieldToColumnMapping.values()) {
columns.addAll(convertFieldNames(tableAlias, fieldEntry));
}
if (table.parent != null) {
FieldEntry parentIdColumn = table.parent.fieldToColumnMapping.get(ID);
where.add(format("%s.%s = %s.%s", tableAlias, tableIdColumn.columnName, parentAlias, parentIdColumn.columnName));
}
}
where.addAll(convertWhere(tableAlias, tableIdColumn, entity));
return format(selectTemplate, convertList(columns), convertList(tableNames),
convertList(where).replaceAll(",", " and "));
}
protected List<String> convertFieldNames(String tableAlias, FieldEntry fieldEntry) {
List<String> columns = new ArrayList<>();
if (fieldEntry.isEmbedded) {
for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
columns.addAll(convertFieldNames(tableAlias, entry));
}
} else {
columns.add(tableAlias + "." + fieldEntry.columnName);
}
return columns;
}
protected List<String> convertWhere(String tableAlias, FieldEntry fieldEntry, Entity entity) {
List<String> where = new ArrayList<>();
if (fieldEntry.isEmbedded) {
for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
where.addAll(convertWhere(tableAlias, entry, entity));
}
} else {
where.add(tableAlias + "." + fieldEntry.columnName + " = " +
convertValue(entity, fieldEntry.getFieldName(), entity.getValueEx(fieldEntry.getFieldName())));
}
return where;
}
protected String convertValue(Entity entity, String fieldName, @Nullable Object value) {
try {
String valueStr;
if (value instanceof Entity) {
value = ((Entity) value).getId();
} else if (value instanceof EnumClass) {
value = ((EnumClass) value).getId();
} else if (value instanceof Enum) {
value = BaseEntityInternalAccess.getValue(entity, fieldName);
}
value = persistence.getDbTypeConverter().getSqlObject(value);
if (value == null) {
valueStr = null;
} else if (value instanceof Date) {
MetaPropertyPath propertyPath = metaClass.getPropertyPath(fieldName);
if (propertyPath != null) {
MetaProperty property = propertyPath.getMetaProperty();
Datatype datatype = property.getRange().asDatatype();
if (datatype.getJavaClass().equals(java.sql.Date.class)) {
valueStr = dateFormat.format((Date) value);
} else if (datatype.getJavaClass().equals(Time.class)) {
valueStr = timeFormat.format((Date) value);
} else {
valueStr = dateTimeFormat.format((Date) value);
}
} else {
valueStr = dateTimeFormat.format((Date) value);
}
} else if (value instanceof String
|| value instanceof UUID
|| value.getClass().getName().toLowerCase().contains("uuid")
|| value instanceof Character) {
if (value instanceof String) {
value = ((String) value).replaceAll("\'", "''");
}
valueStr = format("'%s'", value);
} else {
valueStr = value.toString();
}
return valueStr;
} catch (Exception e) {
throw new RuntimeException(format("An error occurred while converting object [%s] for SQL query", value), e);
}
}
protected String convertList(List<String> strings) {
String string = strings.toString();
return string.substring(1, string.length() - 1);
}
protected void collectTableMetadata(MetaClass metaClass, Table table) {
Class<?> javaClass = metaClass.getJavaClass();
javax.persistence.Table annotation = javaClass.getAnnotation(javax.persistence.Table.class);
MetaClass ancestor = metaClass.getAncestor();
if (annotation != null && StringUtils.isNotEmpty(annotation.name())) {
if (table.name == null) {
table.name = annotation.name();
tables.add(0, table);
} else {
Table newTable = new Table(annotation.name());
tables.add(0, newTable);
table.parent = newTable;
table = newTable;
}
}
if (ancestor != null) {
collectTableMetadata(ancestor, table);
}
table.collectMetadata(javaClass);
}
protected class Table {
protected Table parent;
protected String name;
protected String idColumn;
protected String discriminatorColumn;
protected DiscriminatorType discriminatorType;
protected Map<String, FieldEntry> fieldToColumnMapping = new LinkedHashMap<>();
public Table(String name) {
this.name = name;
}
public String insert(Entity entity) {
List<String> columnNames = new ArrayList<>();
List<String> valuesStr = new ArrayList<>();
if (discriminatorColumn != null) {
String discriminatorValueStr = convertValue(null, null, discriminatorValue());
columnNames.add(discriminatorColumn);
valuesStr.add(discriminatorValueStr);
}
for (Map.Entry<String, FieldEntry> entry : fieldToColumnMapping.entrySet()) {
Pair<List<String>, List<String>> insertStrings = getInsertStrings(entry.getValue(), entity);
columnNames.addAll(insertStrings.getFirst());
valuesStr.addAll(insertStrings.getSecond());
}
return format(insertTemplate, name, convertList(columnNames), convertList(valuesStr));
}
protected Pair<List<String>, List<String>> getInsertStrings(FieldEntry fieldEntry, Entity entity) {
List<String> columnNames = new ArrayList<>();
List<String> valuesStr = new ArrayList<>();
String fieldName = fieldEntry.getFieldName();
if (fieldEntry.isEmbedded) {
for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
Pair<List<String>, List<String>> insertStrings = getInsertStrings(entry, entity);
columnNames.addAll(insertStrings.getFirst());
valuesStr.addAll(insertStrings.getSecond());
}
} else {
Object value = entity.getValueEx(fieldName);
columnNames.add(fieldEntry.columnName);
valuesStr.add(convertValue(entity, fieldName, value));
}
return new Pair<>(columnNames, valuesStr);
}
public String update(Entity entity) {
List<String> valuesStr = new ArrayList<>();
List<String> whereStr = new ArrayList<>();
for (Map.Entry<String, FieldEntry> entry : fieldToColumnMapping.entrySet()) {
Pair<List<String>, List<String>> insertStrings = getUpdateStrings(entry.getValue(), entity);
valuesStr.addAll(insertStrings.getFirst());
whereStr.addAll(insertStrings.getSecond());
}
return format(updateTemplate, name, convertList(valuesStr), "", convertList(whereStr).replaceAll(",", " and "));
}
protected Pair<List<String>, List<String>> getUpdateStrings(FieldEntry fieldEntry, Entity entity) {
List<String> valuesStr = new ArrayList<>();
List<String> whereStr = new ArrayList<>();
String fieldName = fieldEntry.getFieldName();
if (!fieldName.equalsIgnoreCase(ID)) {
if (fieldEntry.isEmbedded) {
for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
Pair<List<String>, List<String>> updateStrings = getUpdateStrings(entry, entity);
valuesStr.addAll(updateStrings.getSecond());
}
} else {
Object value = entity.getValueEx(fieldName);
valuesStr.add(format("%s=%s", fieldEntry.columnName, convertValue(entity, fieldName, value)));
}
} else {
if (fieldEntry.isEmbedded) {
for (FieldEntry entry : fieldEntry.fieldsMapping.values()) {
Pair<List<String>, List<String>> updateStrings = getUpdateStrings(entry, entity);
whereStr.addAll(updateStrings.getFirst());
}
} else {
Object value = entity.getValueEx(fieldName);
whereStr.add(format("%s=%s", fieldEntry.columnName, convertValue(entity, fieldName, value)));
}
}
return new Pair<>(valuesStr, whereStr);
}
@Nullable
protected Object discriminatorValue() {
if (discriminatorValue == null) {
return null;
} else {
if (discriminatorColumn != null && discriminatorType != null) {
switch (discriminatorType) {
case CHAR:
return discriminatorValue.charAt(0);
case INTEGER:
return Integer.valueOf(discriminatorValue);
case STRING:
return discriminatorValue;
}
}
}
return null;
}
protected void collectMetadata(Class clazz) {
if (clazz == null) return;
PrimaryKeyJoinColumn primaryKey = (PrimaryKeyJoinColumn) clazz.getAnnotation(PrimaryKeyJoinColumn.class);
if (primaryKey != null) {
idColumn = primaryKey.name();
} else {
idColumn = resolveIdColumn();
}
fieldToColumnMapping.put(ID, new FieldEntry(ID, idColumn));
DiscriminatorValue discriminatorValueAnnotation = (DiscriminatorValue) clazz.getAnnotation(DiscriminatorValue.class);
Extends extendsAnnotation = (Extends) clazz.getAnnotation(Extends.class);
javax.persistence.Entity entityAnnotation = (javax.persistence.Entity) clazz.getAnnotation(javax.persistence.Entity.class);
DiscriminatorColumn discriminatorColumn = (DiscriminatorColumn) clazz.getAnnotation(DiscriminatorColumn.class);
if (discriminatorValueAnnotation != null) {
discriminatorValue = discriminatorValueAnnotation.value();
} else if (extendsAnnotation != null && entityAnnotation != null) {
discriminatorValue = entityAnnotation.name();
} else if (entityAnnotation != null && discriminatorColumn != null
&& discriminatorColumn.discriminatorType().equals(DiscriminatorType.STRING)) {
discriminatorValue = entityAnnotation.name();
} else if (entityAnnotation != null && primaryKey != null) {
discriminatorValue = entityAnnotation.name();
}
if (discriminatorColumn != null) {
this.discriminatorColumn = discriminatorColumn.name();
this.discriminatorType = discriminatorColumn.discriminatorType();
} else if (discriminatorValue != null && parent == null) {
this.discriminatorColumn = "DTYPE";
this.discriminatorType = DiscriminatorType.STRING;
}
fieldToColumnMapping.putAll(collectFields(clazz));
}
/**
 * Resolves the id column name: this class's own value if set, otherwise the
 * nearest ancestor's, otherwise the upper-cased default id name.
 */
private String resolveIdColumn() {
    if (idColumn != null) {
        return idColumn;
    }
    return parent != null ? parent.resolveIdColumn() : ID.toUpperCase();
}
/**
 * Builds field-name to column mappings for the declared fields of the given
 * class. Embedded fields recurse into their own type and apply any
 * attribute/association overrides; plain fields use @Column or @JoinColumn.
 * Fields without any of these annotations are skipped.
 */
private Map<String, FieldEntry> collectFields(Class clazz) {
    Map<String, FieldEntry> mapping = new LinkedHashMap<>();
    for (Field field : clazz.getDeclaredFields()) {
        String name = field.getName();
        if (field.getAnnotation(Embedded.class) != null
                || field.getAnnotation(EmbeddedId.class) != null) {
            // Embedded object: collect its nested mappings, then apply any
            // overrides declared on this field.
            Map<String, FieldEntry> nested = collectFields(field.getType());
            AttributeOverrides attrOverrides = field.getAnnotation(AttributeOverrides.class);
            if (attrOverrides != null) {
                overrideAttributes(attrOverrides, nested);
            }
            AssociationOverrides assocOverrides = field.getAnnotation(AssociationOverrides.class);
            if (assocOverrides != null) {
                overrideAssociations(assocOverrides, nested);
            }
            mapping.put(name, new FieldEntry(name, nested));
            continue;
        }
        Column column = field.getAnnotation(Column.class);
        if (column != null) {
            mapping.put(name, new FieldEntry(name, column.name()));
            continue;
        }
        JoinColumn joinColumn = field.getAnnotation(JoinColumn.class);
        if (joinColumn != null) {
            mapping.put(name, new FieldEntry(name, joinColumn.name()));
        }
    }
    return mapping;
}
/**
 * Replaces the column mapping of each attribute named in the
 * {@code @AttributeOverrides} annotation of an embedded field.
 */
private void overrideAttributes(AttributeOverrides overrides, Map<String, FieldEntry> embeddedFields) {
    for (AttributeOverride override : overrides.value()) {
        String attribute = override.name();
        embeddedFields.put(attribute, new FieldEntry(attribute, override.column().name()));
    }
}
/**
 * Replaces the join-column mapping of each association named in the
 * {@code @AssociationOverrides} annotation of an embedded field. Only
 * single-join-column associations can be expressed as one field-to-column
 * entry; multi-column overrides are ignored.
 */
private void overrideAssociations(AssociationOverrides overrides, Map<String, FieldEntry> embeddedFields) {
    for (AssociationOverride override : overrides.value()) {
        JoinColumn[] joinColumns = override.joinColumns();
        if (joinColumns.length == 1) {
            String association = override.name();
            embeddedFields.put(association, new FieldEntry(association, joinColumns[0].name()));
        }
    }
}
}
protected static class FieldEntry {
protected String fieldName;
protected boolean isEmbedded;
protected String columnName;
protected Map<String, FieldEntry> fieldsMapping;
protected FieldEntry parentField;
FieldEntry(String fieldName, String columnName) {
this.fieldName = fieldName;
this.columnName = columnName;
this.isEmbedded = false;
}
FieldEntry(String fieldName, Map<String, FieldEntry> fieldsMapping) {
this.fieldName = fieldName;
this.fieldsMapping = fieldsMapping;
for (FieldEntry fieldEntry : this.fieldsMapping.values()) {
fieldEntry.parentField = this;
}
this.isEmbedded = true;
}
protected String getFieldName() {
if (parentField != null) {
return parentField.getFieldName() + "." + fieldName;
}
return fieldName;
}
}
} | Incorrect system information for string id entity cuba-platform/cuba#2996
| modules/core/src/com/haulmont/cuba/core/app/entitysql/EntitySqlGenerator.java | Incorrect system information for string id entity cuba-platform/cuba#2996 |
|
Java | apache-2.0 | b75dea0e85299337a8b891cd234ba17a8baa1901 | 0 | osmdroid/osmdroid,osmdroid/osmdroid,sibext/osmdroid-1,1nv4d3r5/osmdroid,microg/android_external_osmdroid,osmdroid/osmdroid,GeoODK/osmdroid,hyl1987419/osmdroid,beemogmbh/osmdroid,DT9/osmdroid,mozilla/osmdroid,hyl1987419/osmdroid,dozd/osmdroid,fpoyer/osmdroid,GeoODK/osmdroid,dozd/osmdroid,sibext/osmdroid-1,DShamaev/osmdroid,prembasumatary/osmdroid,osmdroid/osmdroid,microg/android_external_osmdroid,prembasumatary/osmdroid,fpoyer/osmdroid,beemogmbh/osmdroid,Sarfarazsajjad/osmdroid,ak-67/osmdroid,DShamaev/osmdroid,Sarfarazsajjad/osmdroid,1nv4d3r5/osmdroid,DT9/osmdroid,ak-67/osmdroid | // Created by plusminus on 18:23:16 - 25.09.2008
package org.andnav.osm.views.util;
import java.util.Random;
import org.andnav.osm.ResourceProxy;
import org.andnav.osm.tileprovider.OpenStreetMapTile;
/**
* The OpenStreetMapRendererInfo stores information about available tile servers.
* @author Nicolas Gramlich
*
*/
public enum OpenStreetMapRendererInfo {
	OSMARENDER(ResourceProxy.string.osmarender, ".png", 0, 17, 8, CodeScheme.X_Y,"http://tah.openstreetmap.org/Tiles/tile/"),
	MAPNIK(ResourceProxy.string.mapnik, ".png", 0, 18, 8, CodeScheme.X_Y,"http://tile.openstreetmap.org/"),
	CYCLEMAP(ResourceProxy.string.cyclemap, ".png", 0, 17, 8, CodeScheme.X_Y,
			"http://a.andy.sandbox.cloudmade.com/tiles/cycle/",
			"http://b.andy.sandbox.cloudmade.com/tiles/cycle/",
			"http://c.andy.sandbox.cloudmade.com/tiles/cycle/"),
	OPENARIELMAP(ResourceProxy.string.openareal_sat, ".jpg", 0, 13, 8, CodeScheme.X_Y,"http://tile.openaerialmap.org/tiles/1.0.0/openaerialmap-900913/"),
	BASE(ResourceProxy.string.base, ".png", 4, 17, 8, CodeScheme.X_Y,"http://topo.openstreetmap.de/base/"),
	TOPO(ResourceProxy.string.topo, ".png", 4, 17, 8, CodeScheme.X_Y,"http://topo.openstreetmap.de/topo/"),
	HILLS(ResourceProxy.string.hills, ".png", 8, 17, 8, CodeScheme.X_Y,"http://topo.geofabrik.de/hills/"),
	CLOUDMADESMALLTILES(ResourceProxy.string.cloudmade_small, ".png", 0, 13, 6, CodeScheme.X_Y,"http://tile.cloudmade.com/BC9A493B41014CAABB98F0471D759707/2/64/"),
	CLOUDMADESTANDARDTILES(ResourceProxy.string.cloudmade_standard, ".png", 0, 18, 8, CodeScheme.X_Y,"http://tile.cloudmade.com/BC9A493B41014CAABB98F0471D759707/2/256/");

	// ===========================================================
	// Fields
	// ===========================================================

	/** How tile coordinates are encoded in the request URL. */
	public enum CodeScheme { X_Y, QUAD_TREE };

	public final ResourceProxy.string NAME;
	public final String BASEURLS[], IMAGE_FILENAMEENDING;
	public final int ZOOM_MINLEVEL, ZOOM_MAXLEVEL, MAPTILE_ZOOM, MAPTILE_SIZEPX;
	public final CodeScheme CODE_SCHEME;

	// Used to spread requests across the mirror base URLs.
	private final Random random;

	// ===========================================================
	// Constructors
	// ===========================================================

	/**
	 * @param aName display-name resource for this renderer
	 * @param aImageFilenameEnding tile image extension, including the dot
	 * @param aZoomMin minimum supported zoom level
	 * @param aZoomMax maximum supported zoom level
	 * @param aTileZoom log2 of the tile edge length in pixels
	 * @param aCodeScheme tile coordinate encoding used in URLs
	 * @param aBaseUrl one or more mirror base URLs, chosen at random per tile
	 */
	private OpenStreetMapRendererInfo(final ResourceProxy.string aName,
			final String aImageFilenameEnding, final int aZoomMin,
			final int aZoomMax, final int aTileZoom, final CodeScheme aCodeScheme,final String ...aBaseUrl) {
		this.BASEURLS = aBaseUrl;
		this.NAME = aName;
		this.ZOOM_MINLEVEL = aZoomMin;
		this.ZOOM_MAXLEVEL = aZoomMax;
		this.IMAGE_FILENAMEENDING = aImageFilenameEnding;
		this.MAPTILE_ZOOM = aTileZoom;
		this.MAPTILE_SIZEPX = 1<<aTileZoom;
		this.CODE_SCHEME = aCodeScheme;
		this.random = new Random();
	}

	public static OpenStreetMapRendererInfo getDefault() {
		return MAPNIK;
	}

	// ===========================================================
	// Methods
	// ===========================================================

	/**
	 * Builds the download URL for the given tile, picking one of the mirror
	 * base URLs at random.
	 */
	public String getTileURLString(final OpenStreetMapTile aTile) {
		final CodeScheme cs = this.CODE_SCHEME;
		// FIX: random.nextInt() % length can be negative (nextInt() returns the
		// full int range), which threw ArrayIndexOutOfBoundsException roughly
		// half the time for multi-mirror renderers. nextInt(bound) is always
		// in [0, bound).
		final String baseurl = BASEURLS[random.nextInt(BASEURLS.length)];
		switch (cs) {
		case QUAD_TREE:
			return baseurl + quadTree(aTile) + IMAGE_FILENAMEENDING;
		case X_Y:
		default:
			return baseurl + aTile.getZoomLevel() + "/" + aTile.getX() + "/" + aTile.getY() + IMAGE_FILENAMEENDING;
		}
	}

	/**
	 * Converts TMS tile coordinates to QuadTree
	 * @param aTile The tile coordinates to convert
	 * @return The QuadTree as String.
	 */
	private String quadTree(final OpenStreetMapTile aTile) {
		final StringBuilder quadKey = new StringBuilder();
		for (int i = aTile.getZoomLevel(); i > 0; i--) {
			int digit = 0;
			int mask = 1 << (i - 1);
			if ((aTile.getX() & mask) != 0)
				digit += 1;
			if ((aTile.getY() & mask) != 0)
				digit += 2;
			quadKey.append(digit); // append the int directly; no throwaway String
		}
		return quadKey.toString();
	}
}
| osmdroid-android/src/org/andnav/osm/views/util/OpenStreetMapRendererInfo.java | // Created by plusminus on 18:23:16 - 25.09.2008
package org.andnav.osm.views.util;
import java.util.Random;
import org.andnav.osm.ResourceProxy;
import org.andnav.osm.tileprovider.OpenStreetMapTile;
/**
* The OpenStreetMapRendererInfo stores information about available tile servers.
* @author Nicolas Gramlich
*
*/
public enum OpenStreetMapRendererInfo {
	OSMARENDER(ResourceProxy.string.osmarender, ".png", 0, 17, 8, CodeScheme.X_Y,"http://tah.openstreetmap.org/Tiles/tile/"),
	MAPNIK(ResourceProxy.string.mapnik, ".png", 0, 18, 8, CodeScheme.X_Y,"http://tile.openstreetmap.org/"),
	CYCLEMAP(ResourceProxy.string.cyclemap, ".png", 0, 17, 8, CodeScheme.X_Y,
			"http://a.andy.sandbox.cloudmade.com/tiles/cycle/",
			"http://b.andy.sandbox.cloudmade.com/tiles/cycle/",
			"http://c.andy.sandbox.cloudmade.com/tiles/cycle/"),
	OPENARIELMAP(ResourceProxy.string.openareal_sat, ".jpg", 0, 13, 8, CodeScheme.X_Y,"http://tile.openaerialmap.org/tiles/1.0.0/openaerialmap-900913/"),
	BASE(ResourceProxy.string.base, ".png", 4, 17, 8, CodeScheme.X_Y,"http://topo.openstreetmap.de/base/"),
	TOPO(ResourceProxy.string.topo, ".png", 4, 17, 8, CodeScheme.X_Y,"http://topo.openstreetmap.de/topo/"),
	HILLS(ResourceProxy.string.hills, ".png", 8, 17, 8, CodeScheme.X_Y,"http://topo.geofabrik.de/hills/"),
	CLOUDMADESMALLTILES(ResourceProxy.string.cloudmade_small, ".png", 0, 13, 6, CodeScheme.X_Y,"http://tile.cloudmade.com/BC9A493B41014CAABB98F0471D759707/2/64/"),
	CLOUDMADESTANDARDTILES(ResourceProxy.string.cloudmade_standard, ".png", 0, 18, 8, CodeScheme.X_Y,"http://tile.cloudmade.com/BC9A493B41014CAABB98F0471D759707/2/256/");

	// ===========================================================
	// Fields
	// ===========================================================

	/** How tile coordinates are encoded in the request URL. */
	public enum CodeScheme { X_Y, QUAD_TREE };

	public final ResourceProxy.string NAME;
	public final String BASEURLS[], IMAGE_FILENAMEENDING;
	public final int ZOOM_MINLEVEL, ZOOM_MAXLEVEL, MAPTILE_ZOOM, MAPTILE_SIZEPX;
	public final CodeScheme CODE_SCHEME;

	// Used to spread requests across the mirror base URLs.
	private final Random random;

	// ===========================================================
	// Constructors
	// ===========================================================

	/**
	 * @param aName display-name resource for this renderer
	 * @param aImageFilenameEnding tile image extension, including the dot
	 * @param aZoomMin minimum supported zoom level
	 * @param aZoomMax maximum supported zoom level
	 * @param aTileZoom log2 of the tile edge length in pixels
	 * @param aCodeScheme tile coordinate encoding used in URLs
	 * @param aBaseUrl one or more mirror base URLs, chosen at random per tile
	 */
	private OpenStreetMapRendererInfo(final ResourceProxy.string aName,
			final String aImageFilenameEnding, final int aZoomMin,
			final int aZoomMax, final int aTileZoom, final CodeScheme aCodeScheme,final String ...aBaseUrl) {
		this.BASEURLS = aBaseUrl;
		this.NAME = aName;
		this.ZOOM_MINLEVEL = aZoomMin;
		this.ZOOM_MAXLEVEL = aZoomMax;
		this.IMAGE_FILENAMEENDING = aImageFilenameEnding;
		this.MAPTILE_ZOOM = aTileZoom;
		this.MAPTILE_SIZEPX = 1<<aTileZoom;
		this.CODE_SCHEME = aCodeScheme;
		this.random = new Random();
	}

	public static OpenStreetMapRendererInfo getDefault() {
		return MAPNIK;
	}

	// ===========================================================
	// Methods
	// ===========================================================

	/**
	 * Builds the download URL for the given tile, picking one of the mirror
	 * base URLs at random.
	 */
	public String getTileURLString(final OpenStreetMapTile aTile) {
		final CodeScheme cs = this.CODE_SCHEME;
		// FIX: random.nextInt() % length can be negative (nextInt() returns the
		// full int range), which threw ArrayIndexOutOfBoundsException roughly
		// half the time for multi-mirror renderers. nextInt(bound) is always
		// in [0, bound).
		final String baseurl = BASEURLS[random.nextInt(BASEURLS.length)];
		switch (cs) {
		case QUAD_TREE:
			return String.format("%s%s%s", baseurl, quadTree(aTile), this.IMAGE_FILENAMEENDING);
		case X_Y:
		default:
			return String.format("%s%d/%d/%d%s", baseurl, aTile.getZoomLevel(), aTile.getX(), aTile.getY(), this.IMAGE_FILENAMEENDING);
		}
	}

	/**
	 * Converts TMS tile coordinates to QuadTree
	 * @param aTile The tile coordinates to convert
	 * @return The QuadTree as String.
	 */
	private String quadTree(final OpenStreetMapTile aTile) {
		final StringBuilder quadKey = new StringBuilder();
		for (int i = aTile.getZoomLevel(); i > 0; i--) {
			int digit = 0;
			int mask = 1 << (i - 1);
			if ((aTile.getX() & mask) != 0)
				digit += 1;
			if ((aTile.getY() & mask) != 0)
				digit += 2;
			quadKey.append(digit); // append the int directly; no throwaway String
		}
		return quadKey.toString();
	}
}
| Just concatenate strings instead of using String.format - that's much more efficient.
| osmdroid-android/src/org/andnav/osm/views/util/OpenStreetMapRendererInfo.java | Just concatenate strings instead of using String.format - that's much more efficient. |
|
Java | apache-2.0 | 91b4f42bb1fab55aea8c55a69ad0a8a314c558b0 | 0 | xingh/terrastore,est/terrastore,byzhang/terrastore,xingh/terrastore,est/terrastore,byzhang/terrastore,xingh/terrastore,est/terrastore,byzhang/terrastore,byzhang/terrastore,est/terrastore,xingh/terrastore | /**
* Copyright 2009 - 2010 Sergio Bossa ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package terrastore.store;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Map;
import terrastore.store.features.Mapper;
import terrastore.store.features.Predicate;
import terrastore.store.features.Update;
import terrastore.store.operators.Condition;
import terrastore.store.operators.Function;
import terrastore.util.io.IOUtils;
import terrastore.util.json.JsonUtils;
/**
* Json value object contained by {@link Bucket} instances.
*
* @author Sergio Bossa
*/
public class Value implements Serializable {

    private static final long serialVersionUID = 12345678901L;

    private static final Charset CHARSET = Charset.forName("UTF-8");

    // Raw payload exactly as handed to the constructor; may already be compressed.
    private final byte[] bytes;
    // Whether 'bytes' holds a compressed stream (detected once at construction).
    private final boolean compressed;

    public Value(byte[] bytes) {
        this.bytes = bytes;
        this.compressed = IOUtils.isCompressed(bytes);
    }

    /**
     * Returns the uncompressed content bytes, inflating the stored payload
     * first if it is compressed.
     *
     * @throws IllegalStateException if decompression fails
     */
    public final byte[] getBytes() {
        try {
            if (compressed) {
                return IOUtils.readCompressed(new ByteArrayInputStream(bytes));
            } else {
                return bytes;
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

    /**
     * Returns the content in compressed form, compressing on the fly if the
     * stored payload is not already compressed.
     *
     * @throws IllegalStateException if compression fails
     */
    public final byte[] getCompressedBytes() {
        try {
            if (compressed) {
                return bytes;
            } else {
                return IOUtils.readAndCompress(new ByteArrayInputStream(bytes));
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

    /**
     * Returns a stream over the uncompressed content.
     *
     * @throws IllegalStateException if the decompressing stream cannot be opened
     */
    public final InputStream getInputStream() {
        try {
            if (compressed) {
                return IOUtils.getCompressedInputStream(bytes);
            } else {
                return new ByteArrayInputStream(bytes);
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

    /** Applies a map function to this value's json content. */
    public final Map<String, Object> dispatch(Key key, Mapper mapper, Function function) {
        return function.apply(key.toString(), JsonUtils.toModifiableMap(this), mapper.getParameters());
    }

    /** Applies an update function and wraps the result as a new Value. */
    public final Value dispatch(Key key, Update update, Function function) {
        return JsonUtils.fromMap(function.apply(key.toString(), JsonUtils.toModifiableMap(this), update.getParameters()));
    }

    /** Evaluates a predicate condition against this value's json content. */
    public final boolean dispatch(Key key, Predicate predicate, Condition condition) {
        return condition.isSatisfied(key.toString(), JsonUtils.toUnmodifiableMap(this), predicate.getConditionExpression());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof Value) {
            Value other = (Value) obj;
            return Arrays.equals(other.bytes, this.bytes);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        // FIX: must be content-based to stay consistent with equals(), which
        // compares byte content; bytes.hashCode() is identity-based, so equal
        // values previously hashed differently (breaking hash-based collections).
        return Arrays.hashCode(bytes);
    }

    @Override
    public String toString() {
        // Render the logical (uncompressed) content as UTF-8 text.
        try {
            if (!compressed) {
                return new String(bytes, CHARSET);
            } else {
                return new String(IOUtils.readCompressed(new ByteArrayInputStream(bytes)), CHARSET);
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

}
| src/main/java/terrastore/store/Value.java | /**
* Copyright 2009 - 2010 Sergio Bossa ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package terrastore.store;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Map;
import terrastore.store.features.Mapper;
import terrastore.store.features.Predicate;
import terrastore.store.features.Update;
import terrastore.store.operators.Condition;
import terrastore.store.operators.Function;
import terrastore.util.io.IOUtils;
import terrastore.util.json.JsonUtils;
/**
* Json value object contained by {@link Bucket} instances.
*
* @author Sergio Bossa
*/
public class Value implements Serializable {

    private static final long serialVersionUID = 12345678901L;

    private static final Charset CHARSET = Charset.forName("UTF-8");

    // Raw payload exactly as handed to the constructor; may already be compressed.
    private final byte[] bytes;
    // Whether 'bytes' holds a compressed stream (detected once at construction).
    private final boolean compressed;

    public Value(byte[] bytes) {
        this.bytes = bytes;
        this.compressed = IOUtils.isCompressed(bytes);
    }

    /**
     * Returns the uncompressed content bytes, inflating the stored payload
     * first if it is compressed.
     *
     * @throws IllegalStateException if decompression fails
     */
    public final byte[] getBytes() {
        try {
            if (compressed) {
                return IOUtils.readCompressed(new ByteArrayInputStream(bytes));
            } else {
                return bytes;
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

    /**
     * Returns the content in compressed form, compressing on the fly if the
     * stored payload is not already compressed.
     *
     * @throws IllegalStateException if compression fails
     */
    public final byte[] getCompressedBytes() {
        try {
            if (compressed) {
                return bytes;
            } else {
                return IOUtils.readAndCompress(new ByteArrayInputStream(bytes));
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

    /**
     * Returns a stream over the uncompressed content.
     *
     * @throws IllegalStateException if the decompressing stream cannot be opened
     */
    public final InputStream getInputStream() {
        try {
            if (compressed) {
                return IOUtils.getCompressedInputStream(bytes);
            } else {
                return new ByteArrayInputStream(bytes);
            }
        } catch (IOException ex) {
            throw new IllegalStateException(ex.getMessage(), ex);
        }
    }

    /** Applies a map function to this value's json content. */
    public final Map<String, Object> dispatch(Key key, Mapper mapper, Function function) {
        return function.apply(key.toString(), JsonUtils.toModifiableMap(this), mapper.getParameters());
    }

    /** Applies an update function and wraps the result as a new Value. */
    public final Value dispatch(Key key, Update update, Function function) {
        return JsonUtils.fromMap(function.apply(key.toString(), JsonUtils.toModifiableMap(this), update.getParameters()));
    }

    /** Evaluates a predicate condition against this value's json content. */
    public final boolean dispatch(Key key, Predicate predicate, Condition condition) {
        return condition.isSatisfied(key.toString(), JsonUtils.toUnmodifiableMap(this), predicate.getConditionExpression());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof Value) {
            Value other = (Value) obj;
            return Arrays.equals(other.bytes, this.bytes);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        // FIX: must be content-based to stay consistent with equals(), which
        // compares byte content; bytes.hashCode() is identity-based, so equal
        // values previously hashed differently (breaking hash-based collections).
        return Arrays.hashCode(bytes);
    }

    @Override
    public String toString() {
        // FIX: decode the logical (uncompressed) content, mirroring getBytes();
        // decoding the raw compressed stream as UTF-8 produced garbage for
        // compressed payloads.
        return new String(getBytes(), CHARSET);
    }

}
| Fixed Value#toString method.
| src/main/java/terrastore/store/Value.java | Fixed Value#toString method. |
|
Java | apache-2.0 | ecce1b56e93e2bf32220cd2dcc6186ab8b311ebd | 0 | senseidb/sensei,senseidb/sensei,javasoze/sensei,javasoze/sensei,javasoze/sensei,senseidb/sensei,senseidb/sensei,javasoze/sensei | package com.senseidb.svc.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.log4j.Logger;
import org.apache.lucene.util.NamedThreadFactory;
import org.jboss.netty.util.internal.ConcurrentHashMap;
import proj.zoie.api.IndexReaderFactory;
import proj.zoie.api.ZoieIndexReader;
import com.browseengine.bobo.api.BoboIndexReader;
import com.linkedin.norbert.network.Serializer;
import com.senseidb.metrics.MetricsConstants;
import com.senseidb.search.node.SenseiCore;
import com.senseidb.search.node.SenseiQueryBuilderFactory;
import com.senseidb.search.req.AbstractSenseiRequest;
import com.senseidb.search.req.AbstractSenseiResult;
import com.senseidb.search.req.ErrorType;
import com.senseidb.search.req.SenseiError;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Meter;
import com.yammer.metrics.core.MetricName;
import com.yammer.metrics.core.Timer;
/**
 * Base class for node-level sensei services: fans a request out to the local
 * partitions (searching all but one on a shared thread pool, the last on the
 * calling thread), merges the per-partition results, and releases all index
 * readers only after the merge step.
 */
public abstract class AbstractSenseiCoreService<Req extends AbstractSenseiRequest,Res extends AbstractSenseiResult>{
  private final static Logger logger = Logger.getLogger(AbstractSenseiCoreService.class);

  // Node-wide metric handles, registered once for all service instances.
  private static Timer GetReaderTimer = null;
  private static Timer SearchTimer = null;
  private static Timer MergeTimer = null;
  private static Meter SearchCounter = null;

  static{
	  // register jmx monitoring for timers
	  try{
	    MetricName getReaderMetricName = new MetricName(MetricsConstants.Domain,"timer","getreader-time","node");
	    GetReaderTimer = Metrics.newTimer(getReaderMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);

	    MetricName searchMetricName = new MetricName(MetricsConstants.Domain,"timer","search-time","node");
	    SearchTimer = Metrics.newTimer(searchMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);

	    MetricName mergeMetricName = new MetricName(MetricsConstants.Domain,"timer","merge-time","node");
	    MergeTimer = Metrics.newTimer(mergeMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);

	    MetricName searchCounterMetricName = new MetricName(MetricsConstants.Domain,"meter","search-count","node");
	    // NOTE(review): "requets" looks like a typo for "requests", but external
	    // dashboards may key on the event-type string, so it is left unchanged.
	    SearchCounter = Metrics.newMeter(searchCounterMetricName, "requets", TimeUnit.SECONDS);
	  }
	  catch(Exception e){
		logger.error(e.getMessage(),e);
	  }
  }

  // Per-request fan-out timeout (milliseconds) when waiting on partition futures.
  protected long _timeout = 8000;

  protected final SenseiCore _core;
  private final NamedThreadFactory threadFactory = new NamedThreadFactory("parallel-searcher");
  private final ExecutorService _executorService = Executors.newCachedThreadPool(threadFactory);
  // Per-partition timers, created lazily. execute() resolves timers from the
  // pooled searcher threads, so this must be a concurrent map: the previous
  // plain HashMap was mutated from multiple threads without synchronization.
  private final ConcurrentHashMap<Integer,Timer> partitionTimerMetricMap = new ConcurrentHashMap<Integer,Timer>();

  public AbstractSenseiCoreService(SenseiCore core){
	  _core = core;
	  // (removed an unused local that only called _core.getPartitions())
  }

  /** Creates and registers the search timer metric for one partition. */
  private Timer buildTimer(int partition) {
    MetricName partitionSearchMetricName = new MetricName(MetricsConstants.Domain,"timer","partition-time-"+partition,"partition");
    return Metrics.newTimer(partitionSearchMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);
  }

  /** Returns the timer for the given partition, creating it on first use. */
  private Timer getTimer(int partition) {
    Timer timer = partitionTimerMetricMap.get(partition);
    if (timer == null) {
      // Two searcher threads may race to create the timer; putIfAbsent keeps a
      // single winner in the map. Duplicate buildTimer calls are harmless here
      // because Metrics.newTimer returns the already-registered timer for a
      // duplicate MetricName.
      Timer candidate = buildTimer(partition);
      Timer raced = partitionTimerMetricMap.putIfAbsent(partition, candidate);
      timer = (raced != null) ? raced : candidate;
    }
    return timer;
  }

  /**
   * Executes the request against every served partition (or the partitions
   * named in the request), merges the per-partition results and returns the
   * combined result. Never throws: failures are reported as SenseiErrors on
   * the request/result.
   */
  public final Res execute(final Req senseiReq){
	SearchCounter.mark();
    Set<Integer> partitions = senseiReq==null ? null : senseiReq.getPartitions();
    if (partitions==null){
      // No explicit partitions on the request: serve every local partition.
      partitions = new HashSet<Integer>();
      int[] containsPart = _core.getPartitions();
      if (containsPart!=null){
    	  for (int part : containsPart){
    		  partitions.add(part);
    	  }
      }
    }
    Res finalResult;
    if (partitions != null && partitions.size() > 0)
    {
      if (logger.isDebugEnabled()){
        logger.debug("serving partitions: " + partitions.toString());
      }
      // we need to release index readers from all partitions only after the merge step
      final Map<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> indexReaderCache =
          new ConcurrentHashMap<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>>();
      try {
        final ArrayList<Res> resultList = new ArrayList<Res>(partitions.size());
        // All partitions except the last are searched on the pool; the last
        // one reuses the calling thread, hence size()-1 futures.
        Future<Res>[] futures = new Future[partitions.size()-1];
        int i = 0;
        for (final int partition : partitions)
        {
          final long start = System.currentTimeMillis();
          final IndexReaderFactory<ZoieIndexReader<BoboIndexReader>> readerFactory = _core.getIndexReaderFactory(partition);

          if (i < partitions.size() - 1)  // Search simultaneously.
          {
            try
            {
              futures[i] = (Future<Res>)_executorService.submit(new Callable<Res>()
              {
                public Res call() throws Exception
                {
                  Timer timer = getTimer(partition);

                  Res res = timer.time(new Callable<Res>(){

                    @Override
                    public Res call() throws Exception {
                      return handleRequest(senseiReq, readerFactory, _core.getQueryBuilderFactory(), indexReaderCache);
                    }
                  });

                  long end = System.currentTimeMillis();
                  res.setTime(end - start);
                  logger.info("searching partition: " + partition + " browse took: " + res.getTime());

                  return res;
                }
              });
            } catch (Exception e)
            {
              senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.BoboExecutionError));
              logger.error(e.getMessage(), e);
            }
          }
          else  // Reuse current thread.
          {
            try
            {
              Timer timer = getTimer(partition);
              Res res = timer.time(new Callable<Res>(){

                @Override
                public Res call() throws Exception {
                  return handleRequest(senseiReq, readerFactory, _core.getQueryBuilderFactory(), indexReaderCache);
                }
              });

              resultList.add(res);

              long end = System.currentTimeMillis();
              res.setTime(end - start);
              logger.info("searching partition: " + partition + " browse took: " + res.getTime());
            } catch (Exception e)
            {
              logger.error(e.getMessage(), e);
              senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.BoboExecutionError));
              resultList.add(getEmptyResultInstance(e));
            }
          }
          ++i;
        }

        // Collect the pooled results; a timed-out or failed partition yields
        // an empty result plus an error on the request.
        for (i=0; i<futures.length; ++i)
        {
          try
          {
            Res res = futures[i].get(_timeout, TimeUnit.MILLISECONDS);
            resultList.add(res);
          }
          catch(Exception e)
          {
            logger.error(e.getMessage(), e);
            if (e instanceof TimeoutException) {
              senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.ExecutionTimeout));
            } else {
              senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.BoboExecutionError));
            }
            resultList.add(getEmptyResultInstance(e));
          }
        }

        try{
          finalResult = MergeTimer.time(new Callable<Res>(){
            public Res call() throws Exception{
              return mergePartitionedResults(senseiReq, resultList);
            }
          });
        }
        catch(Exception e){
          logger.error(e.getMessage(),e);
          finalResult = getEmptyResultInstance(null);
          finalResult.addError(new SenseiError(e.getMessage(), ErrorType.MergePartitionError));
        }
      } finally {
        // Readers from every partition are returned only after the merge.
        returnIndexReaders(indexReaderCache);
      }
    }
    else
    {
      if (logger.isInfoEnabled()){
        logger.info("no partitions specified");
      }
      finalResult = getEmptyResultInstance(null);
      finalResult.addError(new SenseiError("no partitions specified", ErrorType.PartitionCallError));
    }
    if (logger.isInfoEnabled()){
      logger.info("searching partitions: " + String.valueOf(partitions) + "; route by: " + senseiReq.getRouteParam() + "; took: " + finalResult.getTime());
    }
    return finalResult;
  }

  /** Returns every cached reader list to the factory it was borrowed from. */
  private void returnIndexReaders(Map<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> indexReaderCache) {
    for (IndexReaderFactory<ZoieIndexReader<BoboIndexReader>> indexReaderFactory : indexReaderCache.keySet()) {
      indexReaderFactory.returnIndexReaders(indexReaderCache.get(indexReaderFactory));
    }
  }

  /**
   * Borrows the partition's index readers (recording them for later release in
   * indexReadersToCleanUp) and runs the partitioned search on them, timing
   * both phases.
   */
  private final Res handleRequest(final Req senseiReq, final IndexReaderFactory<ZoieIndexReader<BoboIndexReader>> readerFactory,
      final SenseiQueryBuilderFactory queryBuilderFactory,
      Map<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> indexReadersToCleanUp) throws Exception {
    List<ZoieIndexReader<BoboIndexReader>> readerList = null;
    readerList = GetReaderTimer.time(new Callable<List<ZoieIndexReader<BoboIndexReader>>>() {
      public List<ZoieIndexReader<BoboIndexReader>> call() throws Exception {
        if (readerFactory == null)
          return Collections.EMPTY_LIST;
        return readerFactory.getIndexReaders();
      }
    });
    if (logger.isDebugEnabled()) {
      // FIX: parenthesize the ternary. '+' binds tighter than '==', so the old
      // expression compared ("..."+readerList) to null (always false) and the
      // logged message degenerated to just the size value.
      logger.debug("obtained readerList of size: " + (readerList == null ? 0 : readerList.size()));
    }
    if (readerFactory != null && readerList != null) {
      indexReadersToCleanUp.put(readerFactory, readerList);
    }

    final List<BoboIndexReader> boboReaders = ZoieIndexReader.extractDecoratedReaders(readerList);

    return SearchTimer.time(new Callable<Res>() {
      public Res call() throws Exception {
        return handlePartitionedRequest(senseiReq, boboReaders, queryBuilderFactory);
      }
    });
  }

  /** Runs the request against the readers of a single partition. */
  public abstract Res handlePartitionedRequest(Req r,final List<BoboIndexReader> readerList,SenseiQueryBuilderFactory queryBuilderFactory) throws Exception;

  /** Merges the per-partition results into one node-level result. */
  public abstract Res mergePartitionedResults(Req r,List<Res> reqList);

  /** Builds an empty result, optionally carrying the causing error. */
  public abstract Res getEmptyResultInstance(Throwable error);

  public abstract Serializer<Req, Res> getSerializer();
}
| sensei-core/src/main/java/com/senseidb/svc/impl/AbstractSenseiCoreService.java | package com.senseidb.svc.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.log4j.Logger;
import org.apache.lucene.util.NamedThreadFactory;
import org.jboss.netty.util.internal.ConcurrentHashMap;
import proj.zoie.api.IndexReaderFactory;
import proj.zoie.api.ZoieIndexReader;
import com.browseengine.bobo.api.BoboIndexReader;
import com.linkedin.norbert.network.Serializer;
import com.senseidb.metrics.MetricsConstants;
import com.senseidb.search.node.SenseiCore;
import com.senseidb.search.node.SenseiQueryBuilderFactory;
import com.senseidb.search.req.AbstractSenseiRequest;
import com.senseidb.search.req.AbstractSenseiResult;
import com.senseidb.search.req.ErrorType;
import com.senseidb.search.req.SenseiError;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Meter;
import com.yammer.metrics.core.MetricName;
import com.yammer.metrics.core.Timer;
public abstract class AbstractSenseiCoreService<Req extends AbstractSenseiRequest,Res extends AbstractSenseiResult>{
  // Class-wide logger shared by all subclasses.
  private final static Logger logger = Logger.getLogger(AbstractSenseiCoreService.class);

  // Node-level metric handles, registered once in the static initializer below.
  private static Timer GetReaderTimer = null;
  private static Timer SearchTimer = null;
  private static Timer MergeTimer = null;
  private static Meter SearchCounter = null;

  static{
	  // register jmx monitoring for timers
	  try{
	    MetricName getReaderMetricName = new MetricName(MetricsConstants.Domain,"timer","getreader-time","node");
	    GetReaderTimer = Metrics.newTimer(getReaderMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);
	    MetricName searchMetricName = new MetricName(MetricsConstants.Domain,"timer","search-time","node");
	    SearchTimer = Metrics.newTimer(searchMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);
	    MetricName mergeMetricName = new MetricName(MetricsConstants.Domain,"timer","merge-time","node");
	    MergeTimer = Metrics.newTimer(mergeMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);
	    MetricName searchCounterMetricName = new MetricName(MetricsConstants.Domain,"meter","search-count","node");
	    // NOTE(review): "requets" looks like a typo for "requests", but external
	    // dashboards may key on the event-type string — confirm before changing.
	    SearchCounter = Metrics.newMeter(searchCounterMetricName, "requets", TimeUnit.SECONDS);
	  }
	  catch(Exception e){
		logger.error(e.getMessage(),e);
	  }
  }
protected long _timeout = 8000;
protected final SenseiCore _core;
private final NamedThreadFactory threadFactory = new NamedThreadFactory("parallel-searcher");
private final ExecutorService _executorService = Executors.newCachedThreadPool(threadFactory);
private final Map<Integer,Timer> partitionTimerMetricMap = new HashMap<Integer,Timer>();
public AbstractSenseiCoreService(SenseiCore core){
_core = core;
int[] partitions = _core.getPartitions();
}
private Timer buildTimer(int partition) {
MetricName partitionSearchMetricName = new MetricName(MetricsConstants.Domain,"timer","partition-time-"+partition,"partition");
return Metrics.newTimer(partitionSearchMetricName,TimeUnit.MILLISECONDS,TimeUnit.SECONDS);
}
private Timer getTimer(int partition) {
Timer timer = partitionTimerMetricMap.get(partition);
if(timer == null) {
partitionTimerMetricMap.put(partition, buildTimer(partition));
return getTimer(partition);
}
return timer;
}
/**
 * Entry point for a node-level search: fans the request out to the local partitions
 * (all but the last on the shared executor, the last on the calling thread), waits up
 * to {@code _timeout} ms for each submitted future, then merges the per-partition
 * results under {@code MergeTimer}. Index readers borrowed per partition are returned
 * only after the merge (see the finally block).
 *
 * @param senseiReq the request; may be null or name no partitions, in which case all
 *                  partitions hosted by this core are searched
 * @return the merged result, or an empty result carrying a {@link SenseiError} on failure
 */
public final Res execute(final Req senseiReq){
SearchCounter.mark();
// Fall back to every locally hosted partition when the request names none.
Set<Integer> partitions = senseiReq==null ? null : senseiReq.getPartitions();
if (partitions==null){
partitions = new HashSet<Integer>();
int[] containsPart = _core.getPartitions();
if (containsPart!=null){
for (int part : containsPart){
partitions.add(part);
}
}
}
Res finalResult;
if (partitions != null && partitions.size() > 0)
{
if (logger.isDebugEnabled()){
logger.debug("serving partitions: " + partitions.toString());
}
//we need to release index readers from all partitions only after the merge step
final Map<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> indexReaderCache = new ConcurrentHashMap<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>>();
try {
final ArrayList<Res> resultList = new ArrayList<Res>(partitions.size());
// size()-1 futures: the last partition is searched on the current thread below.
Future<Res>[] futures = new Future[partitions.size()-1];
int i = 0;
for (final int partition : partitions)
{
final long start = System.currentTimeMillis();
final IndexReaderFactory<ZoieIndexReader<BoboIndexReader>> readerFactory = _core.getIndexReaderFactory(partition);
if (i < partitions.size() - 1) // Search simultaneously.
{
try
{
futures[i] = (Future<Res>)_executorService.submit(new Callable<Res>()
{
public Res call() throws Exception
{
// Per-partition timer wraps the actual partition search.
Timer timer = getTimer(partition);
Res res = timer.time(new Callable<Res>(){
@Override
public Res call() throws Exception {
return handleRequest(senseiReq, readerFactory, _core.getQueryBuilderFactory(), indexReaderCache);
}
});
long end = System.currentTimeMillis();
res.setTime(end - start);
logger.info("searching partition: " + partition + " browse took: " + res.getTime());
return res;
}
});
} catch (Exception e)
{
senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.BoboExecutionError));
logger.error(e.getMessage(), e);
}
}
else // Reuse current thread.
{
try
{
Timer timer = getTimer(partition);
Res res = timer.time(new Callable<Res>(){
@Override
public Res call() throws Exception {
return handleRequest(senseiReq, readerFactory, _core.getQueryBuilderFactory(), indexReaderCache);
}
});
resultList.add(res);
long end = System.currentTimeMillis();
res.setTime(end - start);
logger.info("searching partition: " + partition + " browse took: " + res.getTime());
} catch (Exception e)
{
// A failed partition contributes an empty result so the merge still proceeds.
logger.error(e.getMessage(), e);
senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.BoboExecutionError));
resultList.add(getEmptyResultInstance(e));
}
}
++i;
}
// Collect the concurrently searched partitions; each gets at most _timeout ms.
for (i=0; i<futures.length; ++i)
{
try
{
Res res = futures[i].get(_timeout, TimeUnit.MILLISECONDS);
resultList.add(res);
}
catch(Exception e)
{
logger.error(e.getMessage(), e);
if (e instanceof TimeoutException) {
senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.ExecutionTimeout));
} else {
senseiReq.addError(new SenseiError(e.getMessage(), ErrorType.BoboExecutionError));
}
resultList.add(getEmptyResultInstance(e));
}
}
// Merge all partition results (including empty placeholders) under the merge timer.
try{
finalResult = MergeTimer.time(new Callable<Res>(){
public Res call() throws Exception{
return mergePartitionedResults(senseiReq, resultList);
}
});
}
catch(Exception e){
logger.error(e.getMessage(),e);
finalResult = getEmptyResultInstance(null);
finalResult.addError(new SenseiError(e.getMessage(), ErrorType.MergePartitionError));
}
} finally {
// Readers were borrowed inside handleRequest(); release them only after merging.
returnIndexReaders(indexReaderCache);
}
}
else
{
if (logger.isInfoEnabled()){
logger.info("no partitions specified");
}
finalResult = getEmptyResultInstance(null);
finalResult.addError(new SenseiError("no partitions specified", ErrorType.PartitionCallError));
}
if (logger.isInfoEnabled()){
logger.info("searching partitions " + String.valueOf(partitions) + " took: " + finalResult.getTime());
}
return finalResult;
}
/**
 * Returns every cached reader list to the factory it was borrowed from.
 *
 * @param indexReaderCache map from factory to the readers obtained from it during this request
 */
private void returnIndexReaders(Map<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> indexReaderCache) {
  // Iterate entries directly instead of keySet()+get(): one lookup fewer per factory.
  for (Map.Entry<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> entry : indexReaderCache.entrySet()) {
    entry.getKey().returnIndexReaders(entry.getValue());
  }
}
/**
 * Searches a single partition: borrows index readers (timed by {@code GetReaderTimer}),
 * records them in {@code indexReadersToCleanUp} so the caller can return them after the
 * merge, and runs the partitioned request over the decorated bobo readers (timed by
 * {@code SearchTimer}).
 *
 * @param senseiReq              the request being served
 * @param readerFactory          factory for this partition's readers; may be null
 * @param queryBuilderFactory    factory used to build the query for this request
 * @param indexReadersToCleanUp  out-param cache of borrowed readers, released by the caller
 * @return the per-partition result
 * @throws Exception if borrowing readers or searching fails
 */
private final Res handleRequest(final Req senseiReq, final IndexReaderFactory<ZoieIndexReader<BoboIndexReader>> readerFactory,
    final SenseiQueryBuilderFactory queryBuilderFactory,
    Map<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>, List<ZoieIndexReader<BoboIndexReader>>> indexReadersToCleanUp) throws Exception {
  List<ZoieIndexReader<BoboIndexReader>> readerList = null;
  readerList = GetReaderTimer.time(new Callable<List<ZoieIndexReader<BoboIndexReader>>>() {
    public List<ZoieIndexReader<BoboIndexReader>> call() throws Exception {
      if (readerFactory == null)
        // Typed empty list instead of the raw Collections.EMPTY_LIST.
        return Collections.<ZoieIndexReader<BoboIndexReader>>emptyList();
      return readerFactory.getIndexReaders();
    }
  });
  if (logger.isDebugEnabled()) {
    // BUGFIX: the original wrote ("..." + readerList == null ? 0 : readerList.size());
    // '+' binds tighter than '?:', so the condition compared the concatenated string to
    // null (always false), dropped the message text, and would NPE on a null list.
    logger.debug("obtained readerList of size: " + (readerList == null ? 0 : readerList.size()));
  }
  if (readerFactory != null && readerList != null) {
    indexReadersToCleanUp.put(readerFactory, readerList);
  }
  final List<BoboIndexReader> boboReaders = ZoieIndexReader.extractDecoratedReaders(readerList);
  return SearchTimer.time(new Callable<Res>() {
    public Res call() throws Exception {
      return handlePartitionedRequest(senseiReq, boboReaders, queryBuilderFactory);
    }
  });
}
/** Runs the request against the bobo readers of a single partition. */
public abstract Res handlePartitionedRequest(Req r,final List<BoboIndexReader> readerList,SenseiQueryBuilderFactory queryBuilderFactory) throws Exception;
/** Merges the per-partition results into the single response returned by execute(). */
public abstract Res mergePartitionedResults(Req r,List<Res> reqList);
/** Builds an empty result; {@code error} is the failure that produced it, or null. */
public abstract Res getEmptyResultInstance(Throwable error);
/** Serializer for shipping requests/responses of this service. */
public abstract Serializer<Req, Res> getSerializer();
}
| logging for route by param in node level.
| sensei-core/src/main/java/com/senseidb/svc/impl/AbstractSenseiCoreService.java | logging for route by param in node level. |
|
Java | apache-2.0 | 9756a5659d42a54f45acc7e447ef7195c11de28d | 0 | rest-assured/rest-assured,RocketRaccoon/rest-assured,rest-assured/rest-assured,paweld2/rest-assured,paweld2/rest-assured,rest-assured/rest-assured,jayway/rest-assured,BenSeverson/rest-assured,jayway/rest-assured,RocketRaccoon/rest-assured,BenSeverson/rest-assured | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.itest.java;
import io.restassured.RestAssured;
import io.restassured.builder.ResponseBuilder;
import io.restassured.config.LogConfig;
import io.restassured.filter.Filter;
import io.restassured.filter.FilterContext;
import io.restassured.filter.log.LogDetail;
import io.restassured.filter.log.RequestLoggingFilter;
import io.restassured.filter.log.ResponseLoggingFilter;
import io.restassured.http.ContentType;
import io.restassured.itest.java.objects.Greeting;
import io.restassured.itest.java.objects.Message;
import io.restassured.itest.java.objects.ScalatraObject;
import io.restassured.itest.java.support.WithJetty;
import io.restassured.parsing.Parser;
import io.restassured.response.Response;
import io.restassured.specification.FilterableRequestSpecification;
import io.restassured.specification.FilterableResponseSpecification;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.WriterOutputStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import static io.restassured.RestAssured.*;
import static io.restassured.config.EncoderConfig.encoderConfig;
import static io.restassured.config.RestAssuredConfig.config;
import static io.restassured.filter.log.ErrorLoggingFilter.logErrorsTo;
import static io.restassured.filter.log.LogDetail.COOKIES;
import static io.restassured.filter.log.RequestLoggingFilter.logRequestTo;
import static io.restassured.filter.log.ResponseLoggingFilter.logResponseTo;
import static io.restassured.filter.log.ResponseLoggingFilter.logResponseToIfMatches;
import static io.restassured.parsing.Parser.JSON;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
// Integration tests for REST Assured's request/response logging filters and log() DSL,
// run against the embedded Jetty server provided by WithJetty.
public class LoggingITest extends WithJetty {
public static final String LINE_SEPARATOR = System.getProperty("line.separator");
// Disable pretty printing by default; individual tests re-enable it where needed.
@Before
public void setup() throws Exception {
RestAssured.config = config().logConfig(LogConfig.logConfig().enablePrettyPrinting(false));
}
// Restore global RestAssured state so tests do not leak configuration.
@After
public void teardown() throws Exception {
RestAssured.reset();
}
// ErrorLoggingFilter writes the error body to the supplied stream on a 409 response.
@Test
public void errorLoggingFilterWorks() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logErrorsTo(captor)).and().expect().body(equalTo("ERROR")).when().get("/409");
assertThat(writer.toString(), containsString("ERROR"));
}
// log().ifError() on the response spec does not break the request/assertion flow.
@Test
public void logErrorsUsingRequestSpec() throws Exception {
expect().log().ifError().body(equalTo("ERROR")).when().get("/409");
}
// log().everything() on the request spec does not break the request/assertion flow.
@Test
public void logUsingRequestSpec() throws Exception {
given().log().everything().and().expect().body(equalTo("ERROR")).when().get("/409");
}
// log().everything() on the response spec does not break the request/assertion flow.
@Test
public void logUsingResponseSpec() throws Exception {
expect().log().everything().body(equalTo("ERROR")).when().get("/409");
}
// LogDetail ALL includes status line, headers and multi-value Set-Cookie headers.
@Test
public void logResponseThatHasCookiesWithLogDetailAll() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logResponseTo(captor)).and().expect().body(equalTo("OK")).when().get("/multiCookie");
assertThat(writer.toString(), allOf(startsWith("HTTP/1.1 200 OK\nContent-Type: text/plain;charset=utf-8\nSet-Cookie: cookie1=cookieValue1;Domain=localhost\nExpires:"),
containsString("Set-Cookie: cookie1=cookieValue2;Version=1;Path=/;Domain=localhost;Expires="), endsWith(";Max-Age=1234567;Secure;Comment=\"My Purpose\"\nContent-Length: 2\nServer: Jetty(9.3.2.v20150730)\n\nOK" + LINE_SEPARATOR)));
}
// LogDetail COOKIES logs only the cookies, including all attributes.
@Test
public void logResponseThatHasCookiesWithLogDetailCookies() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logResponseTo(captor, COOKIES)).and().expect().body(equalTo("OK")).when().get("/multiCookie");
assertThat(writer.toString(), allOf(startsWith("cookie1=cookieValue1;Domain=localhost\ncookie1=cookieValue2;Comment=\"My Purpose\";Path=/;Domain=localhost;Max-Age=1234567;Secure;Expires="), endsWith(";Version=1" + LINE_SEPARATOR)));
}
// ResponseLoggingFilter logs error responses.
@Test
public void loggingResponseFilterLogsErrors() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logResponseTo(captor)).and().expect().body(equalTo("ERROR")).when().get("/409");
assertThat(writer.toString(), containsString("ERROR"));
}
// ResponseLoggingFilter also logs successful (non-error) responses.
@Test
public void loggingResponseFilterLogsNonErrors() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logResponseTo(captor)).expect().body("greeting", equalTo("Greetings John Doe")).when().get("/greet?firstName=John&lastName=Doe");
assertThat(writer.toString(), containsString("{\"greeting\":\"Greetings John Doe\"}"));
}
// Matcher-guarded filter logs when the status code matcher is satisfied.
@Test
public void loggingResponseFilterLogsToSpecifiedWriterWhenMatcherIsFulfilled() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logResponseToIfMatches(captor, equalTo(200))).expect().body("greeting", equalTo("Greetings John Doe")).when().get("/greet?firstName=John&lastName=Doe");
assertThat(writer.toString(), containsString("{\"greeting\":\"Greetings John Doe\"}"));
}
// Matcher-guarded filter stays silent when the status code matcher is not satisfied.
@Test
public void loggingResponseFilterDoesntLogWhenSpecifiedMatcherIsNotFulfilled() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().filter(logResponseToIfMatches(captor, equalTo(400))).expect().body("greeting", equalTo("Greetings John Doe")).when().get("/greet?firstName=John&lastName=Doe");
assertThat(writer.toString(), is(""));
}
// The response is logged even when a body expectation subsequently fails.
@Test
public void loggingResponseFilterLogsWhenExpectationsFail() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
try {
given().filter(logResponseTo(captor)).expect().body("greeting", equalTo("Greetings John Do")).when().get("/greet?firstName=John&lastName=Doe");
fail("Should throw exception");
} catch (AssertionError e) {
assertThat(writer.toString(), containsString("{\"greeting\":\"Greetings John Doe\"}"));
}
}
// RequestLoggingFilter renders every request detail: all param kinds, multi-value
// cookies and headers, and the implicit Accept/Content-Type headers.
@Test
public void loggingRequestFilterWithParamsCookiesAndHeaders() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(captor)).
formParam("firstName", "John").
formParam("lastName", "Doe").
queryParam("something1", "else1").
queryParam("something2", "else2").
queryParam("something3", "else3").
param("hello1", "world1").
param("hello2", "world2").
param("multiParam", "multi1", "multi2").
cookie("multiCookie", "value1", "value2").
cookie("standardCookie", "standard value").
header("multiHeader", "headerValue1", "headerValue2").
header("standardHeader", "standard header value").
expect().
body("greeting", equalTo("Greetings John Doe")).
when().
post("/greet");
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/greet?something1=else1&something2=else2&something3=else3\nProxy:\t\t\t<none>\nRequest params:\thello1=world1\n\t\t\t\thello2=world2\n\t\t\t\tmultiParam=[multi1, multi2]\nQuery params:\tsomething1=else1\n\t\t\t\tsomething2=else2\n\t\t\t\tsomething3=else3\nForm params:\tfirstName=John\n\t\t\t\tlastName=Doe\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tmultiHeader=headerValue1\n\t\t\t\tmultiHeader=headerValue2\n\t\t\t\tstandardHeader=standard header value\n\t\t\t\tAccept=*/*\n" +
"\t\t\t\tContent-Type=application/x-www-form-urlencoded; charset=" + RestAssured.config().getEncoderConfig().defaultContentCharset() +
"\nCookies:\t\tmultiCookie=value1\n\t\t\t\tmultiCookie=value2\n\t\t\t\tstandardCookie=standard value\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
// STATUS is a response-only detail and must be rejected by the request filter.
@Test(expected = IllegalArgumentException.class)
public void loggingRequestFilterDoesntAcceptStatusAsLogDetail() throws Exception {
new RequestLoggingFilter(LogDetail.STATUS);
}
// An explicitly set Content-Type header appears in the logged request headers.
@Test
public void loggingRequestFilterWithExplicitContentType() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(captor)).
param("firstName", "John").
param("lastName", "Doe").
header("Content-type", "application/json").
expect().
body("greeting", equalTo("Greetings John Doe")).
when().
get("/greet");
assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/greet?firstName=John&lastName=Doe\nProxy:\t\t\t<none>\nRequest params:\tfirstName=John\n\t\t\t\tlastName=Doe\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/json; charset="+ RestAssured.config().getEncoderConfig().defaultCharsetForContentType(ContentType.JSON)+"\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
// Path parameters are logged both resolved in the URI and under "Path params".
@Test
public void loggingRequestFilterPathParams() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(captor)).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/John/Doe\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\tfirstName=John\n\t\t\t\tlastName=Doe\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
// A serialized object body is included in the logged request.
@Test
public void loggingRequestFilterWithBody() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
final ScalatraObject object = new ScalatraObject();
object.setHello("Hello world");
given().filter(new RequestLoggingFilter(captor)).expect().defaultParser(JSON).given().body(object).when().post("/reflect");
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR));
}
// Request and response filters on the same stream log request first, then response,
// when the request filter is registered first.
@Test
public void loggingRequestAndResponseAtTheSameTimeWhenRequestFilterIsAddedBeforeResponseFilter() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
final ScalatraObject object = new ScalatraObject();
object.setHello("Hello world");
given().
filters(new RequestLoggingFilter(captor), new ResponseLoggingFilter(captor)).
body(object).
expect().
defaultParser(JSON).
when().
post("/reflect");
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR + "HTTP/1.1 200 OK\nContent-Type: text/plain;charset=iso-8859-1\nContent-Length: 23\nServer: Jetty(9.3.2.v20150730)\n\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR));
}
// Output order is the same (request before response) even when the response filter
// is registered first.
@Test
public void loggingRequestAndResponseAtTheSameTimeWhenResponseFilterIsAddedBeforeRequestFilter() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
final ScalatraObject object = new ScalatraObject();
object.setHello("Hello world");
given().
filters(new ResponseLoggingFilter(captor), new RequestLoggingFilter(captor)).
body(object).
expect().
defaultParser(JSON).
when().
post("/reflect");
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR + "HTTP/1.1 200 OK\nContent-Type: text/plain;charset=iso-8859-1\nContent-Length: 23\nServer: Jetty(9.3.2.v20150730)\n\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR));
}
// log().all() on the response spec writes status, headers and body to the default stream.
@Test
public void logEverythingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(LogConfig.logConfig().defaultStream(captor).and().enablePrettyPrinting(false))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\"firstName\":\"John\",\"lastName\":\"Doe\",\"fullName\":\"John Doe\"}" + LINE_SEPARATOR));
}
// ifStatusCodeIsEqualTo logs when the actual status code matches.
@Test
public void logIfStatusCodeIsEqualToResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(LogConfig.logConfig().defaultStream(captor))).
expect().
log().ifStatusCodeIsEqualTo(409).
when().
get("/409");
assertThat(writer.toString(), equalTo("HTTP/1.1 409 Conflict\nContent-Type: text/plain;charset=utf-8\nContent-Length: 5\nServer: Jetty(9.3.2.v20150730)\n\nERROR" + LINE_SEPARATOR));
}
// ifStatusCodeIsEqualTo stays silent when the actual status code differs.
@Test
public void doesntLogIfStatusCodeIsNotEqualToResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(LogConfig.logConfig().defaultStream(captor))).
expect().
log().ifStatusCodeIsEqualTo(200).
when().
get("/409");
assertThat(writer.toString(), equalTo(""));
}
// ifStatusCodeMatches logs when the Hamcrest matcher accepts the status code.
@Test
public void logIfStatusCodeMatchesResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(LogConfig.logConfig().defaultStream(captor))).
expect().
log().ifStatusCodeMatches(greaterThan(200)).
when().
get("/409");
assertThat(writer.toString(), equalTo("HTTP/1.1 409 Conflict\nContent-Type: text/plain;charset=utf-8\nContent-Length: 5\nServer: Jetty(9.3.2.v20150730)\n\nERROR" + LINE_SEPARATOR));
}
// log().body() logs the body only (no status line or headers).
@Test
public void logOnlyBodyUsingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, false))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().body().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("{\"firstName\":\"John\",\"lastName\":\"Doe\",\"fullName\":\"John Doe\"}" + LINE_SEPARATOR));
}
// Pretty printing enabled in LogConfig formats a JSON response body.
@Test
public void logOnlyResponseBodyWithPrettyPrintingWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().body().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("{\n    \"firstName\": \"John\",\n    \"lastName\": \"Doe\",\n    \"fullName\": \"John Doe\"\n}" + LINE_SEPARATOR));
}
// Pretty printing formats an unformatted XML response body.
@Test
public void logOnlyResponseBodyWithPrettyPrintingWhenXml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().body().
body("videos.music[0].title", is("Video Title 1")).
when().
get("/videos-not-formatted");
assertThat(writer.toString(), equalTo("<videos>\n  <music>\n    <title>Video Title 1</title>\n    <artist>Artist 1</artist>\n  </music>\n  <music>\n    <title>Video Title 2</title>\n    <artist>Artist 2</artist>\n    <artist>Artist 3</artist>\n  </music>\n</videos>" + LINE_SEPARATOR));
}
// Pretty printing formats an unformatted HTML response body.
@Test
public void logOnlyResponseBodyWithPrettyPrintingWhenHtml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().body().
body("html.head.title", is("my title")).
when().
get("/textHTML-not-formatted");
assertThat(writer.toString(), equalTo("<html>\n  <head>\n    <title>my title</title>\n  </head>\n  <body>\n    <p>paragraph 1</p>\n    <p>paragraph 2</p>\n  </body>\n</html>" + LINE_SEPARATOR));
}
// log().all() with pretty printing enabled formats the JSON body after the headers.
@Test
public void logAllWithPrettyPrintingWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\n    \"firstName\": \"John\",\n    \"lastName\": \"Doe\",\n    \"fullName\": \"John Doe\"\n}" + LINE_SEPARATOR));
}
// all(true) overrides a config that disabled pretty printing.
@Test
public void logAllWithPrettyPrintingUsingDSLWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, false))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all(true).
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\n    \"firstName\": \"John\",\n    \"lastName\": \"Doe\",\n    \"fullName\": \"John Doe\"\n}" + LINE_SEPARATOR));
}
// all(false) overrides a config that enabled pretty printing.
@Test
public void logAllWithNoPrettyPrintingUsingDSLWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all(false).
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\"firstName\":\"John\",\"lastName\":\"Doe\",\"fullName\":\"John Doe\"}" + LINE_SEPARATOR));
}
// body(true) overrides a config that disabled pretty printing (XML body).
@Test
public void logOnlyResponseBodyWithPrettyPrintingUsingDSLWhenXml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, false))).
expect().
log().body(true).
body("videos.music[0].title", is("Video Title 1")).
when().
get("/videos-not-formatted");
assertThat(writer.toString(), equalTo("<videos>\n  <music>\n    <title>Video Title 1</title>\n    <artist>Artist 1</artist>\n  </music>\n  <music>\n    <title>Video Title 2</title>\n    <artist>Artist 2</artist>\n    <artist>Artist 3</artist>\n  </music>\n</videos>" + LINE_SEPARATOR));
}
// body(false) overrides a config that enabled pretty printing (XML body).
@Test
public void logOnlyResponseBodyWithNoPrettyPrintingUsingDSLWhenXml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().body(false).
body("videos.music[0].title", is("Video Title 1")).
when().
get("/videos-not-formatted");
assertThat(writer.toString(), equalTo("<videos><music><title>Video Title 1</title><artist>Artist 1</artist></music><music><title>Video Title 2</title><artist>Artist 2</artist><artist>Artist 3</artist></music></videos>" + LINE_SEPARATOR));
}
// log().status() logs only the status line.
@Test
public void logOnlyStatusUsingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().status().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK" + LINE_SEPARATOR));
}
// log().headers() logs only the response headers.
@Test
public void logOnlyHeadersUsingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().headers().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("Content-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)" + LINE_SEPARATOR));
}
// log().headers() logs each value of a multi-valued header on its own line.
@Test
public void logOnlyHeadersUsingResponseUsingLogSpecWhenMultiHeaders() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().headers().
when().
get("/multiValueHeader");
assertThat(writer.toString(), equalTo("Content-Type: text/plain;charset=utf-8\nMultiHeader: Value 1\nMultiHeader: Value 2\nContent-Length: 0\nServer: Jetty(9.3.2.v20150730)" + LINE_SEPARATOR));
}
// log().cookies() logs only the response cookies, including all attributes.
@Test
public void logOnlyCookiesUsingResponseLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().cookies().
when().
get("/multiCookie");
assertThat(writer.toString(), allOf(startsWith("cookie1=cookieValue1;Domain=localhost\ncookie1=cookieValue2;Comment=\"My Purpose\";Path=/;Domain=localhost;Max-Age=1234567;Secure;Expires="), endsWith(";Version=1" + LINE_SEPARATOR)));
}
// A body that cannot be parsed as its declared content type is logged verbatim.
@Test
public void logBodyPrettyPrintedUsingResponseLogSpecWhenContentTypeDoesntMatchContent() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().everything().
body(equalTo("This is not a valid JSON document")).
when().
get("/contentTypeJsonButContentIsNotJson");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 33\nServer: Jetty(9.3.2.v20150730)\n\nThis is not a valid JSON document" + LINE_SEPARATOR));
}
// Request-side log().everything() renders the full request details.
@Test
public void logAllUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().everything().
param("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greetJSON");
assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/greetJSON?firstName=John&lastName=Doe\nProxy:\t\t\t<none>\nRequest params:\tfirstName=John\nQuery params:\tlastName=Doe\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logParamsUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().parameters().
param("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greetJSON");
assertThat(writer.toString(), equalTo("Request params:\tfirstName=John\nQuery params:\tlastName=Doe\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logNoValueParamsUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().parameters().
formParam("formParam").
queryParam("queryParam").
when().
post("/noValueParam");
assertThat(writer.toString(), equalTo("Request params:\t<none>\nQuery params:\tqueryParam\nForm params:\tformParam\nPath params:\t<none>\nMultiparts:\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logBodyUsingRequestLogSpec() throws Exception {
    // Capture the request log in-memory so the exact output can be asserted.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(new LogConfig(captor, true))).
            // log().body() with no request body should print the <none> placeholder.
            log().body().
            param("firstName", "John").
            queryParam("lastName", "Doe").
    when().
            get("/greetJSON");
    assertThat(writer.toString(), equalTo("Body:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingRequestLogSpec() throws Exception {
    // Pretty printing is enabled via the LogConfig constructor (second arg = true).
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            contentType(ContentType.JSON).
            config(config().logConfig(new LogConfig(captor, true))).
            log().body().
            body("{ \"something\" : \"else\" }").
    when().
            post("/body");
    // The compact JSON input must be re-indented in the log output.
    assertThat(writer.toString(), equalTo("Body:\n{\n    \"something\": \"else\"\n}" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingDslAndRequestLogSpec() throws Exception {
    // Pretty printing is disabled in LogConfig (second arg = false) but enabled
    // per-call through the DSL: log().body(true).
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            contentType(ContentType.JSON).
            config(config().logConfig(new LogConfig(captor, false))).
            log().body(true).
            body("{ \"something\" : \"else\" }").
    when().
            post("/body");
    assertThat(writer.toString(), equalTo("Body:\n{\n    \"something\": \"else\"\n}" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingRequestLogSpecAndObjectMapping() throws Exception {
    // A POJO serialized through object mapping (JSON) should be pretty printed
    // in the request log when pretty printing is enabled.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    final Message message = new Message();
    message.setMessage("My message");
    given().
            contentType(ContentType.JSON).
            config(config().logConfig(new LogConfig(captor, true))).
            log().body().
            body(message).
    when().
            post("/body");
    assertThat(writer.toString(), equalTo("Body:\n{\n    \"message\": \"My message\"\n}" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingRequestLogSpecAndObjectMappingWhenXML() throws Exception {
    // Same as the JSON variant above but with XML object mapping: the serialized
    // body must be pretty printed as indented XML.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    final Greeting greeting = new Greeting();
    greeting.setFirstName("John");
    greeting.setLastName("Doe");
    given().
            contentType(ContentType.XML).
            config(config().logConfig(new LogConfig(captor, true))).
            log().body().
            body(greeting).
    when().
            post("/body");
    assertThat(writer.toString(), equalTo("Body:\n<greeting>\n  <firstName>John</firstName>\n  <lastName>Doe</lastName>\n</greeting>" + LINE_SEPARATOR));
}
@Test
public void logCookiesUsingRequestLogSpec() throws Exception {
    // Capture the request log in-memory so the exact output can be asserted.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(new LogConfig(captor, true))).
            log().cookies().
            cookie("myCookie1", "myCookieValue1").
            cookie("myCookie2", "myCookieValue2").
            // A multi-value cookie must be logged as one line per value.
            cookie("myMultiCookie", "myMultiCookieValue1", "myMultiCookieValue2").
    when().
            post("/reflect");
    assertThat(writer.toString(), equalTo("Cookies:\t\tmyCookie1=myCookieValue1\n\t\t\t\tmyCookie2=myCookieValue2\n\t\t\t\tmyMultiCookie=myMultiCookieValue1\n\t\t\t\tmyMultiCookie=myMultiCookieValue2" + LINE_SEPARATOR));
}
@Test
public void logHeadersUsingRequestLogSpec() throws Exception {
    // Capture the request log in-memory so the exact output can be asserted.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(new LogConfig(captor, true))).
            log().headers().
            header("myHeader1", "myHeaderValue1").
            header("myHeader2", "myHeaderValue2").
            // A multi-value header must be logged as one line per value; the
            // implicit Accept header is appended last.
            header("myMultiHeader", "myMultiHeaderValue1", "myMultiHeaderValue2").
    when().
            get("/multiHeaderReflect");
    assertThat(writer.toString(), equalTo("Headers:\t\tmyHeader1=myHeaderValue1\n\t\t\t\tmyHeader2=myHeaderValue2\n\t\t\t\tmyMultiHeader=myMultiHeaderValue1\n\t\t\t\tmyMultiHeader=myMultiHeaderValue2\n\t\t\t\tAccept=*/*" + LINE_SEPARATOR));
}
@Test
public void logBodyPrettyPrintedUsingRequestLogSpecWhenContentTypeDoesntMatchContent() throws Exception {
    // When the declared content type (JSON) does not match the actual body,
    // pretty printing must fall back to logging the body verbatim.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(new LogConfig(captor, true))).
            log().everything().
            contentType("application/json").
            body("This is not JSON").
    when().
            post("/reflect");
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/json; charset="+ RestAssured.config().getEncoderConfig().defaultCharsetForContentType(ContentType.JSON)+"\nCookies:\t\t<none>\nBody:\nThis is not JSON" + LINE_SEPARATOR));
}
@Test
public void logAllWhenBasePathIsDefinedUsingRequestLogSpec() throws Exception {
    // The statically configured base path must be included in the logged URI.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    RestAssured.basePath = "/reflect";
    try {
        given().
                config(config().logConfig(new LogConfig(captor, true))).
                log().all().
                body("hello").
        when().
                post("/");
    } finally {
        // Always reset the static configuration so other tests are unaffected.
        RestAssured.reset();
    }
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect/\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\nhello" + LINE_SEPARATOR));
}
@Test
public void logAllWhenBaseURIIsDefinedUsingRequestLogSpec() throws Exception {
    // A base URI that already contains a path must be logged as the full URI.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    RestAssured.baseURI = "http://localhost:8080/reflect";
    try {
        given().
                config(config().logConfig(new LogConfig(captor, true))).
                log().all().
                body("hello").
        when().
                post("/");
    } finally {
        // Always reset the static configuration so other tests are unaffected.
        RestAssured.reset();
    }
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect/\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\nhello" + LINE_SEPARATOR));
}
@Test
public void logAllWhenBasePathAndBasePortAndBaseURIIsDefinedUsingRequestLogSpec() throws Exception {
    // Base URI, port and base path configured separately must be combined into
    // one fully-qualified URI in the request log.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    RestAssured.baseURI = "http://localhost";
    RestAssured.port = 8080;
    RestAssured.basePath = "/reflect";
    try {
        given().
                config(config().logConfig(new LogConfig(captor, true))).
                log().all().
                body("hello").
        when().
                post("/");
    } finally {
        // Always reset the static configuration so other tests are unaffected.
        RestAssured.reset();
    }
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect/\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\nhello" + LINE_SEPARATOR));
}
@Test
public void logsFullyQualifiedUrlsAreLoggedCorrectly() throws Exception {
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(new LogConfig(captor, true))).
            log().all().
            // The filter short-circuits the request with a canned response so no
            // real network call is made to the external host.
            filter(new Filter() {
                public Response filter(FilterableRequestSpecification requestSpec, FilterableResponseSpecification responseSpec, FilterContext ctx) {
                    return new ResponseBuilder().setStatusCode(200).setBody("changed").build();
                }
            }).get("http://www.beijingchina.net.cn/transportation/train/train-to-shanghai.html");
    // Only the prefix is asserted: the URL must be logged fully qualified.
    assertThat(writer.toString(), startsWith("Request method:\tGET\nRequest URI:\thttp://www.beijingchina.net.cn/transportation/train/train-to-shanghai.html"));
}
@Test
public void logsXmlNamespacesCorrectly() throws Exception {
    // XML namespace prefixes (ns:bar) must be preserved in the logged response.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(logResponseTo(captor)).
    expect().
            statusCode(200).
    when().
            get("/namespace-example");
    assertThat(writer.toString(), containsString("foo xmlns:ns=\"http://localhost/\">\n      <bar>sudo </bar>\n      <ns:bar>make me a sandwich!</ns:bar>\n    </foo>"));
}
@Test
public void logsMultiPartParamsOnLogAll() throws Exception {
    // Given: an XSD fixture used both as a multipart payload and as the expected
    // echo from the server, plus a larger binary part from the test classpath.
    final byte[] bytes = IOUtils.toByteArray(getClass().getResourceAsStream("/car-records.xsd"));
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    InputStream powermock = getClass().getResourceAsStream("/powermock-easymock-junit-1.4.12.zip");
    // When: three multiparts of different kinds (byte[], String, InputStream)
    // are posted while the response is logged.
    given().
            filter(logResponseTo(captor)).
            multiPart("file", "myFile", bytes).
            multiPart("something", "testing", "text/plain").
            multiPart("powermock", "powermock-1.4.12", powermock).
    when().
            post("/multipart/file").
    then().
            statusCode(200).
            body(is(new String(bytes)));
    // Then: the full response (status line, headers and echoed XSD body) must
    // appear in the captured log.
    assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: text/plain;charset=utf-8\nContent-Length: 1512\nServer: Jetty(9.3.2.v20150730)\n\n<!--\n  ~ Copyright 2013 the original author or authors.\n  ~\n  ~ Licensed under the Apache License, Version 2.0 (the \"License\");\n  ~ you may not use this file except in compliance with the License.\n  ~ You may obtain a copy of the License at\n  ~\n  ~        http://www.apache.org/licenses/LICENSE-2.0\n  ~\n  ~ Unless required by applicable law or agreed to in writing, software\n  ~ distributed under the License is distributed on an \"AS IS\" BASIS,\n  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  ~ See the License for the specific language governing permissions and\n  ~ limitations under the License.\n  -->\n<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" elementFormDefault=\"qualified\">\n  <xs:element name=\"records\">\n    <xs:complexType>\n      <xs:sequence>\n        <xs:element maxOccurs=\"unbounded\" ref=\"car\"/>\n      </xs:sequence>\n    </xs:complexType>\n  </xs:element>\n  <xs:element name=\"car\">\n    <xs:complexType>\n      <xs:sequence>\n        <xs:element ref=\"country\"/>\n        <xs:element ref=\"record\"/>\n      </xs:sequence>\n      <xs:attribute name=\"make\" use=\"required\" type=\"xs:NCName\"/>\n      <xs:attribute name=\"name\" use=\"required\"/>\n      <xs:attribute name=\"year\" use=\"required\" type=\"xs:integer\"/>\n    </xs:complexType>\n  </xs:element>\n  <xs:element name=\"country\" type=\"xs:string\"/>\n  <xs:element name=\"record\">\n    <xs:complexType mixed=\"true\">\n      <xs:attribute name=\"type\" use=\"required\" type=\"xs:NCName\"/>\n    </xs:complexType>\n  </xs:element>\n</xs:schema>"+LINE_SEPARATOR));
}
@Test public void
doesnt_include_default_charset_in_request_log_when_it_is_configured_not_to_be_added() {
    // With appendDefaultContentCharsetToContentTypeIfUndefined(false) the logged
    // Content-Type header must NOT carry a "; charset=..." suffix.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(logRequestTo(captor)).
            config(RestAssured.config().encoderConfig(encoderConfig().appendDefaultContentCharsetToContentTypeIfUndefined(false))).
            param("foo", "bar").
            contentType(ContentType.XML).
    when().
            post("/contentTypeAsBody").
    then().
            statusCode(200);
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/contentTypeAsBody\nProxy:\t\t\t<none>\nRequest params:\tfoo=bar\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/xml\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test public void
includes_default_charset_in_request_log_by_default() {
    // Counterpart to the test above: by default the logged Content-Type header
    // includes the encoder's default charset.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(logRequestTo(captor)).
            param("foo", "bar").
            contentType(ContentType.XML).
    when().
            post("/contentTypeAsBody").
    then().
            statusCode(200);
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/contentTypeAsBody\nProxy:\t\t\t<none>\nRequest params:\tfoo=bar\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/xml; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\t\t\t<none>"+LINE_SEPARATOR));
}
@Test public void
form_param_are_logged_as_query_params_for_get_requests() {
    // GET requests cannot carry form params in the body, so REST Assured sends
    // them on the query string; the log still lists them under "Form params".
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(logRequestTo(captor)).
            formParam("firstName", "John").
            formParam("lastName", "Doe").
    when().
            get("/greet").
    then().
            body("greeting", equalTo("Greetings John Doe"));
    assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/greet?firstName=John&lastName=Doe\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\tfirstName=John\n\t\t\t\tlastName=Doe\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/x-www-form-urlencoded; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\t\t\t<none>"+LINE_SEPARATOR));
}
@Test public void
using_log_detail_method_only_logs_the_request_method() {
    // Collect everything the logging filter writes into an in-memory buffer.
    final StringWriter logOutput = new StringWriter();
    final PrintStream logStream = new PrintStream(new WriterOutputStream(logOutput), true);
    given().
            filter(new RequestLoggingFilter(LogDetail.METHOD, logStream)).
            queryParam("firstName", "John").
            queryParam("lastName", "Doe").
    when().
            get("/greet").
    then().
            statusCode(200);
    // With LogDetail.METHOD the filter must emit the HTTP method and nothing else.
    assertThat(logOutput.toString(), equalTo("Request method:\tGET" + LINE_SEPARATOR));
}
@Test public void
using_log_detail_uri_only_logs_the_request_uri() {
    // Collect everything the logging filter writes into an in-memory buffer.
    final StringWriter logOutput = new StringWriter();
    final PrintStream logStream = new PrintStream(new WriterOutputStream(logOutput), true);
    given().
            filter(new RequestLoggingFilter(LogDetail.URI, logStream)).
            queryParam("firstName", "John").
            queryParam("lastName", "Doe").
    when().
            get("/greet").
    then().
            statusCode(200);
    // With LogDetail.URI the filter must emit only the fully resolved URI,
    // including the query parameters.
    assertThat(logOutput.toString(), equalTo("Request URI:\thttp://localhost:8080/greet?firstName=John&lastName=Doe" + LINE_SEPARATOR));
}
@Test public void
shows_request_log_as_url_encoded_when_explicitly_instructing_request_logging_filter_to_do_so() throws UnsupportedEncodingException {
    // The 4-arg RequestLoggingFilter constructor's last flag (true) asks for
    // URL-encoded logging, so "#€" must appear percent-encoded in the log.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(new RequestLoggingFilter(LogDetail.URI, true, captor, true)).
            queryParam("firstName", "John#€").
            queryParam("lastName", "Doe").
    when().
            get("/greet").
    then().
            statusCode(200);
    assertThat(writer.toString(), equalTo("Request URI:\thttp://localhost:8080/greet?firstName=John" + URLEncoder.encode("#€", "UTF-8") + "&lastName=Doe" + LINE_SEPARATOR));
}
@Test public void
shows_request_log_as_without_url_encoding_when_explicitly_instructing_request_logging_filter_to_do_so() throws UnsupportedEncodingException {
    // With the URL-encoding flag set to false the raw characters ("#€") must be
    // logged as typed, not percent-encoded.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(new RequestLoggingFilter(LogDetail.URI, true, captor, false)).
            queryParam("firstName", "John#€").
            queryParam("lastName", "Doe").
    when().
            get("/greet").
    then().
            statusCode(200);
    assertThat(writer.toString(), equalTo("Request URI:\thttp://localhost:8080/greet?firstName=John#€&lastName=Doe" + LINE_SEPARATOR));
}
// This was previously a bug (https://github.com/rest-assured/rest-assured/issues/684)
@Test public void
assert_that_register_text_json_content_type_can_be_used_in_conjunction_with_enable_logging_of_request_and_response_if_validation_fails() {
    // Point REST Assured at the local Jetty fixture.
    RestAssured.port = 8080;
    RestAssured.baseURI = "http://127.0.0.1";
    // Registering a custom parser for "text/json" must not conflict with
    // failure-triggered request/response logging.
    RestAssured.registerParser("text/json", Parser.JSON);
    RestAssured.enableLoggingOfRequestAndResponseIfValidationFails();
    when().get("/text-json").then().body("test", is(true));
}
} | examples/rest-assured-itest-java/src/test/java/io/restassured/itest/java/LoggingITest.java | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.itest.java;
import io.restassured.RestAssured;
import io.restassured.builder.ResponseBuilder;
import io.restassured.config.LogConfig;
import io.restassured.filter.Filter;
import io.restassured.filter.FilterContext;
import io.restassured.filter.log.LogDetail;
import io.restassured.filter.log.RequestLoggingFilter;
import io.restassured.filter.log.ResponseLoggingFilter;
import io.restassured.http.ContentType;
import io.restassured.itest.java.objects.Greeting;
import io.restassured.itest.java.objects.Message;
import io.restassured.itest.java.objects.ScalatraObject;
import io.restassured.itest.java.support.WithJetty;
import io.restassured.response.Response;
import io.restassured.specification.FilterableRequestSpecification;
import io.restassured.specification.FilterableResponseSpecification;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.WriterOutputStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import static io.restassured.RestAssured.expect;
import static io.restassured.RestAssured.given;
import static io.restassured.config.EncoderConfig.encoderConfig;
import static io.restassured.config.RestAssuredConfig.config;
import static io.restassured.filter.log.ErrorLoggingFilter.logErrorsTo;
import static io.restassured.filter.log.LogDetail.COOKIES;
import static io.restassured.filter.log.RequestLoggingFilter.logRequestTo;
import static io.restassured.filter.log.ResponseLoggingFilter.logResponseTo;
import static io.restassured.filter.log.ResponseLoggingFilter.logResponseToIfMatches;
import static io.restassured.parsing.Parser.JSON;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class LoggingITest extends WithJetty {
// Platform-dependent line separator that the logging filters append after each log entry.
public static final String LINE_SEPARATOR = System.getProperty("line.separator");
@Before
public void setup() throws Exception {
    // Disable pretty printing globally so tests can assert on raw, unformatted
    // log output; individual tests re-enable it where needed.
    final LogConfig plainLogConfig = LogConfig.logConfig().enablePrettyPrinting(false);
    RestAssured.config = config().logConfig(plainLogConfig);
}
@After
public void teardown() throws Exception {
    // Restore all static RestAssured state mutated by the tests in this class.
    RestAssured.reset();
}
@Test
public void errorLoggingFilterWorks() throws Exception {
    // Route the error-logging filter's output into an in-memory buffer.
    final StringWriter logOutput = new StringWriter();
    final PrintStream logStream = new PrintStream(new WriterOutputStream(logOutput), true);
    given().
            filter(logErrorsTo(logStream)).
    and().expect().
            body(equalTo("ERROR")).
    when().
            get("/409");
    // A 409 response is an error, so the filter must have logged its body.
    assertThat(logOutput.toString(), containsString("ERROR"));
}
@Test
public void logErrorsUsingRequestSpec() throws Exception {
    // Exercises the DSL variant log().ifError(); output goes to the default
    // stream, so this only verifies the call chain executes successfully.
    expect().
            log().ifError().
            body(equalTo("ERROR")).
    when().
            get("/409");
}
@Test
public void logUsingRequestSpec() throws Exception {
    // Exercises request-side log().everything() through the DSL; output goes to
    // the default stream, so this only verifies the call chain executes.
    given().
            log().everything().
    and().expect().
            body(equalTo("ERROR")).
    when().
            get("/409");
}
@Test
public void logUsingResponseSpec() throws Exception {
    // Exercises response-side log().everything() through the DSL; output goes to
    // the default stream, so this only verifies the call chain executes.
    expect().
            log().everything().
            body(equalTo("ERROR")).
    when().
            get("/409");
}
@Test
public void logResponseThatHasCookiesWithLogDetailAll() throws Exception {
    // Expires dates vary per run, so the assertion matches prefix, middle and
    // suffix of the log instead of the exact full string.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().filter(logResponseTo(captor)).and().expect().body(equalTo("OK")).when().get("/multiCookie");
    assertThat(writer.toString(), allOf(startsWith("HTTP/1.1 200 OK\nContent-Type: text/plain;charset=utf-8\nSet-Cookie: cookie1=cookieValue1;Domain=localhost\nExpires:"),
            containsString("Set-Cookie: cookie1=cookieValue2;Version=1;Path=/;Domain=localhost;Expires="), endsWith(";Max-Age=1234567;Secure;Comment=\"My Purpose\"\nContent-Length: 2\nServer: Jetty(9.3.2.v20150730)\n\nOK" + LINE_SEPARATOR)));
}
@Test
public void logResponseThatHasCookiesWithLogDetailCookies() throws Exception {
    // With LogDetail.COOKIES only the Set-Cookie contents are logged; the
    // Expires attribute varies per run, so only prefix and suffix are asserted.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().filter(logResponseTo(captor, COOKIES)).and().expect().body(equalTo("OK")).when().get("/multiCookie");
    assertThat(writer.toString(), allOf(startsWith("cookie1=cookieValue1;Domain=localhost\ncookie1=cookieValue2;Comment=\"My Purpose\";Path=/;Domain=localhost;Max-Age=1234567;Secure;Expires="), endsWith(";Version=1" + LINE_SEPARATOR)));
}
@Test
public void loggingResponseFilterLogsErrors() throws Exception {
    // Route the response-logging filter's output into an in-memory buffer.
    final StringWriter logOutput = new StringWriter();
    final PrintStream logStream = new PrintStream(new WriterOutputStream(logOutput), true);
    given().
            filter(logResponseTo(logStream)).
    and().expect().
            body(equalTo("ERROR")).
    when().
            get("/409");
    // The response logging filter must also log error (409) responses.
    assertThat(logOutput.toString(), containsString("ERROR"));
}
@Test
public void loggingResponseFilterLogsNonErrors() throws Exception {
    // Route the response-logging filter's output into an in-memory buffer.
    final StringWriter logOutput = new StringWriter();
    final PrintStream logStream = new PrintStream(new WriterOutputStream(logOutput), true);
    given().
            filter(logResponseTo(logStream)).
    expect().
            body("greeting", equalTo("Greetings John Doe")).
    when().
            get("/greet?firstName=John&lastName=Doe");
    // Successful (2xx) response bodies must be logged as well, not just errors.
    assertThat(logOutput.toString(), containsString("{\"greeting\":\"Greetings John Doe\"}"));
}
@Test
public void loggingResponseFilterLogsToSpecifiedWriterWhenMatcherIsFulfilled() throws Exception {
    // logResponseToIfMatches only logs when the status-code matcher succeeds;
    // here the matcher (200) matches, so the body must be captured.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().filter(logResponseToIfMatches(captor, equalTo(200))).expect().body("greeting", equalTo("Greetings John Doe")).when().get("/greet?firstName=John&lastName=Doe");
    assertThat(writer.toString(), containsString("{\"greeting\":\"Greetings John Doe\"}"));
}
@Test
public void loggingResponseFilterDoesntLogWhenSpecifiedMatcherIsNotFulfilled() throws Exception {
    // Counterpart of the test above: the matcher expects 400 but the server
    // returns 200, so nothing at all must be written to the log.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().filter(logResponseToIfMatches(captor, equalTo(400))).expect().body("greeting", equalTo("Greetings John Doe")).when().get("/greet?firstName=John&lastName=Doe");
    assertThat(writer.toString(), is(""));
}
@Test
public void loggingResponseFilterLogsWhenExpectationsFail() throws Exception {
    // Even when a body expectation fails (expected "Greetings John Do"), the
    // response must already have been logged before the AssertionError is thrown.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    try {
        given().filter(logResponseTo(captor)).expect().body("greeting", equalTo("Greetings John Do")).when().get("/greet?firstName=John&lastName=Doe");
        fail("Should throw exception");
    } catch (AssertionError e) {
        assertThat(writer.toString(), containsString("{\"greeting\":\"Greetings John Doe\"}"));
    }
}
@Test
public void loggingRequestFilterWithParamsCookiesAndHeaders() throws Exception {
    // End-to-end check of the request log layout: form/query/request params,
    // multi-value params, multi-value cookies and multi-value headers must all
    // be rendered in their own sections, one line per value.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(new RequestLoggingFilter(captor)).
            formParam("firstName", "John").
            formParam("lastName", "Doe").
            queryParam("something1", "else1").
            queryParam("something2", "else2").
            queryParam("something3", "else3").
            param("hello1", "world1").
            param("hello2", "world2").
            param("multiParam", "multi1", "multi2").
            cookie("multiCookie", "value1", "value2").
            cookie("standardCookie", "standard value").
            header("multiHeader", "headerValue1", "headerValue2").
            header("standardHeader", "standard header value").
    expect().
            body("greeting", equalTo("Greetings John Doe")).
    when().
            post("/greet");
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/greet?something1=else1&something2=else2&something3=else3\nProxy:\t\t\t<none>\nRequest params:\thello1=world1\n\t\t\t\thello2=world2\n\t\t\t\tmultiParam=[multi1, multi2]\nQuery params:\tsomething1=else1\n\t\t\t\tsomething2=else2\n\t\t\t\tsomething3=else3\nForm params:\tfirstName=John\n\t\t\t\tlastName=Doe\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tmultiHeader=headerValue1\n\t\t\t\tmultiHeader=headerValue2\n\t\t\t\tstandardHeader=standard header value\n\t\t\t\tAccept=*/*\n" +
            "\t\t\t\tContent-Type=application/x-www-form-urlencoded; charset=" + RestAssured.config().getEncoderConfig().defaultContentCharset() +
            "\nCookies:\t\tmultiCookie=value1\n\t\t\t\tmultiCookie=value2\n\t\t\t\tstandardCookie=standard value\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test(expected = IllegalArgumentException.class)
public void loggingRequestFilterDoesntAcceptStatusAsLogDetail() throws Exception {
    // STATUS is a response-only detail, so the request filter must reject it.
    new RequestLoggingFilter(LogDetail.STATUS);
}
@Test
public void loggingRequestFilterWithExplicitContentType() throws Exception {
    // A Content-Type set via header() must be logged (with the default charset
    // for that type appended) rather than the implicit default.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(new RequestLoggingFilter(captor)).
            param("firstName", "John").
            param("lastName", "Doe").
            header("Content-type", "application/json").
    expect().
            body("greeting", equalTo("Greetings John Doe")).
    when().
            get("/greet");
    assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/greet?firstName=John&lastName=Doe\nProxy:\t\t\t<none>\nRequest params:\tfirstName=John\n\t\t\t\tlastName=Doe\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/json; charset="+ RestAssured.config().getEncoderConfig().defaultCharsetForContentType(ContentType.JSON)+"\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void loggingRequestFilterPathParams() throws Exception {
    // Path params must be listed in their own section and substituted into the
    // logged URI (/John/Doe).
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            filter(new RequestLoggingFilter(captor)).
            pathParam("firstName", "John").
            pathParam("lastName", "Doe").
    expect().
            body("fullName", equalTo("John Doe")).
    when().
            get("/{firstName}/{lastName}");
    assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/John/Doe\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\tfirstName=John\n\t\t\t\tlastName=Doe\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void loggingRequestFilterWithBody() throws Exception {
    // A serialized object body must be printed after the "Body:" label.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    final ScalatraObject object = new ScalatraObject();
    object.setHello("Hello world");
    given().filter(new RequestLoggingFilter(captor)).expect().defaultParser(JSON).given().body(object).when().post("/reflect");
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR));
}
@Test
public void loggingRequestAndResponseAtTheSameTimeWhenRequestFilterIsAddedBeforeResponseFilter() throws Exception {
    // Both filters share one captor; with the request filter registered first,
    // the request log must precede the response log in the combined output.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    final ScalatraObject object = new ScalatraObject();
    object.setHello("Hello world");
    given().
            filters(new RequestLoggingFilter(captor), new ResponseLoggingFilter(captor)).
            body(object).
    expect().
            defaultParser(JSON).
    when().
            post("/reflect");
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR + "HTTP/1.1 200 OK\nContent-Type: text/plain;charset=iso-8859-1\nContent-Length: 23\nServer: Jetty(9.3.2.v20150730)\n\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR));
}
@Test
public void loggingRequestAndResponseAtTheSameTimeWhenResponseFilterIsAddedBeforeRequestFilter() throws Exception {
    // Same scenario as above with the filter registration order swapped: the
    // expected output is identical (request log first, then response log).
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    final ScalatraObject object = new ScalatraObject();
    object.setHello("Hello world");
    given().
            filters(new ResponseLoggingFilter(captor), new RequestLoggingFilter(captor)).
            body(object).
    expect().
            defaultParser(JSON).
    when().
            post("/reflect");
    assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR + "HTTP/1.1 200 OK\nContent-Type: text/plain;charset=iso-8859-1\nContent-Length: 23\nServer: Jetty(9.3.2.v20150730)\n\n{\"hello\":\"Hello world\"}" + LINE_SEPARATOR));
}
@Test
public void logEverythingResponseUsingLogSpec() throws Exception {
    // Uses the LogConfig default stream (instead of a filter) to capture the
    // response; log().all() must emit status line, headers and body.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(LogConfig.logConfig().defaultStream(captor).and().enablePrettyPrinting(false))).
            pathParam("firstName", "John").
            pathParam("lastName", "Doe").
    expect().
            log().all().
            body("fullName", equalTo("John Doe")).
    when().
            get("/{firstName}/{lastName}");
    assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\"firstName\":\"John\",\"lastName\":\"Doe\",\"fullName\":\"John Doe\"}" + LINE_SEPARATOR));
}
@Test
public void logIfStatusCodeIsEqualToResponseUsingLogSpec() throws Exception {
    // The response must be logged because the actual status (409) equals the
    // status code given to ifStatusCodeIsEqualTo.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(LogConfig.logConfig().defaultStream(captor))).
    expect().
            log().ifStatusCodeIsEqualTo(409).
    when().
            get("/409");
    assertThat(writer.toString(), equalTo("HTTP/1.1 409 Conflict\nContent-Type: text/plain;charset=utf-8\nContent-Length: 5\nServer: Jetty(9.3.2.v20150730)\n\nERROR" + LINE_SEPARATOR));
}
@Test
public void doesntLogIfStatusCodeIsNotEqualToResponseUsingLogSpec() throws Exception {
    // Counterpart of the test above: expected status 200 does not match the
    // actual 409, so the log must remain empty.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(LogConfig.logConfig().defaultStream(captor))).
    expect().
            log().ifStatusCodeIsEqualTo(200).
    when().
            get("/409");
    assertThat(writer.toString(), equalTo(""));
}
@Test
public void logIfStatusCodeMatchesResponseUsingLogSpec() throws Exception {
    // Matcher-based variant: 409 > 200, so the response must be logged.
    final StringWriter writer = new StringWriter();
    final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
    given().
            config(config().logConfig(LogConfig.logConfig().defaultStream(captor))).
    expect().
            log().ifStatusCodeMatches(greaterThan(200)).
    when().
            get("/409");
    assertThat(writer.toString(), equalTo("HTTP/1.1 409 Conflict\nContent-Type: text/plain;charset=utf-8\nContent-Length: 5\nServer: Jetty(9.3.2.v20150730)\n\nERROR" + LINE_SEPARATOR));
}
@Test
public void logOnlyBodyUsingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, false))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().body().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("{\"firstName\":\"John\",\"lastName\":\"Doe\",\"fullName\":\"John Doe\"}" + LINE_SEPARATOR));
}
@Test
public void logOnlyResponseBodyWithPrettyPrintingWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().body().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("{\n \"firstName\": \"John\",\n \"lastName\": \"Doe\",\n \"fullName\": \"John Doe\"\n}" + LINE_SEPARATOR));
}
@Test
public void logOnlyResponseBodyWithPrettyPrintingWhenXml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().body().
body("videos.music[0].title", is("Video Title 1")).
when().
get("/videos-not-formatted");
assertThat(writer.toString(), equalTo("<videos>\n <music>\n <title>Video Title 1</title>\n <artist>Artist 1</artist>\n </music>\n <music>\n <title>Video Title 2</title>\n <artist>Artist 2</artist>\n <artist>Artist 3</artist>\n </music>\n</videos>" + LINE_SEPARATOR));
}
@Test
public void logOnlyResponseBodyWithPrettyPrintingWhenHtml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().body().
body("html.head.title", is("my title")).
when().
get("/textHTML-not-formatted");
assertThat(writer.toString(), equalTo("<html>\n <head>\n <title>my title</title>\n </head>\n <body>\n <p>paragraph 1</p>\n <p>paragraph 2</p>\n </body>\n</html>" + LINE_SEPARATOR));
}
@Test
public void logAllWithPrettyPrintingWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\n \"firstName\": \"John\",\n \"lastName\": \"Doe\",\n \"fullName\": \"John Doe\"\n}" + LINE_SEPARATOR));
}
@Test
public void logAllWithPrettyPrintingUsingDSLWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, false))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all(true).
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\n \"firstName\": \"John\",\n \"lastName\": \"Doe\",\n \"fullName\": \"John Doe\"\n}" + LINE_SEPARATOR));
}
@Test
public void logAllWithNoPrettyPrintingUsingDSLWhenJson() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().all(false).
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)\n\n{\"firstName\":\"John\",\"lastName\":\"Doe\",\"fullName\":\"John Doe\"}" + LINE_SEPARATOR));
}
@Test
public void logOnlyResponseBodyWithPrettyPrintingUsingDSLWhenXml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, false))).
expect().
log().body(true).
body("videos.music[0].title", is("Video Title 1")).
when().
get("/videos-not-formatted");
assertThat(writer.toString(), equalTo("<videos>\n <music>\n <title>Video Title 1</title>\n <artist>Artist 1</artist>\n </music>\n <music>\n <title>Video Title 2</title>\n <artist>Artist 2</artist>\n <artist>Artist 3</artist>\n </music>\n</videos>" + LINE_SEPARATOR));
}
@Test
public void logOnlyResponseBodyWithNoPrettyPrintingUsingDSLWhenXml() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().body(false).
body("videos.music[0].title", is("Video Title 1")).
when().
get("/videos-not-formatted");
assertThat(writer.toString(), equalTo("<videos><music><title>Video Title 1</title><artist>Artist 1</artist></music><music><title>Video Title 2</title><artist>Artist 2</artist><artist>Artist 3</artist></music></videos>" + LINE_SEPARATOR));
}
@Test
public void logOnlyStatusUsingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().status().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK" + LINE_SEPARATOR));
}
@Test
public void logOnlyHeadersUsingResponseUsingLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
pathParam("firstName", "John").
pathParam("lastName", "Doe").
expect().
log().headers().
body("fullName", equalTo("John Doe")).
when().
get("/{firstName}/{lastName}");
assertThat(writer.toString(), equalTo("Content-Type: application/json;charset=utf-8\nContent-Length: 59\nServer: Jetty(9.3.2.v20150730)" + LINE_SEPARATOR));
}
@Test
public void logOnlyHeadersUsingResponseUsingLogSpecWhenMultiHeaders() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().headers().
when().
get("/multiValueHeader");
assertThat(writer.toString(), equalTo("Content-Type: text/plain;charset=utf-8\nMultiHeader: Value 1\nMultiHeader: Value 2\nContent-Length: 0\nServer: Jetty(9.3.2.v20150730)" + LINE_SEPARATOR));
}
@Test
public void logOnlyCookiesUsingResponseLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().cookies().
when().
get("/multiCookie");
assertThat(writer.toString(), allOf(startsWith("cookie1=cookieValue1;Domain=localhost\ncookie1=cookieValue2;Comment=\"My Purpose\";Path=/;Domain=localhost;Max-Age=1234567;Secure;Expires="), endsWith(";Version=1" + LINE_SEPARATOR)));
}
@Test
public void logBodyPrettyPrintedUsingResponseLogSpecWhenContentTypeDoesntMatchContent() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
expect().
log().everything().
body(equalTo("This is not a valid JSON document")).
when().
get("/contentTypeJsonButContentIsNotJson");
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: application/json;charset=utf-8\nContent-Length: 33\nServer: Jetty(9.3.2.v20150730)\n\nThis is not a valid JSON document" + LINE_SEPARATOR));
}
@Test
public void logAllUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().everything().
param("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greetJSON");
assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/greetJSON?firstName=John&lastName=Doe\nProxy:\t\t\t<none>\nRequest params:\tfirstName=John\nQuery params:\tlastName=Doe\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logParamsUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().parameters().
param("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greetJSON");
assertThat(writer.toString(), equalTo("Request params:\tfirstName=John\nQuery params:\tlastName=Doe\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logNoValueParamsUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().parameters().
formParam("formParam").
queryParam("queryParam").
when().
post("/noValueParam");
assertThat(writer.toString(), equalTo("Request params:\t<none>\nQuery params:\tqueryParam\nForm params:\tformParam\nPath params:\t<none>\nMultiparts:\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logBodyUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().body().
param("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greetJSON");
assertThat(writer.toString(), equalTo("Body:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
contentType(ContentType.JSON).
config(config().logConfig(new LogConfig(captor, true))).
log().body().
body("{ \"something\" : \"else\" }").
when().
post("/body");
assertThat(writer.toString(), equalTo("Body:\n{\n \"something\": \"else\"\n}" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingDslAndRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
contentType(ContentType.JSON).
config(config().logConfig(new LogConfig(captor, false))).
log().body(true).
body("{ \"something\" : \"else\" }").
when().
post("/body");
assertThat(writer.toString(), equalTo("Body:\n{\n \"something\": \"else\"\n}" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingRequestLogSpecAndObjectMapping() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
final Message message = new Message();
message.setMessage("My message");
given().
contentType(ContentType.JSON).
config(config().logConfig(new LogConfig(captor, true))).
log().body().
body(message).
when().
post("/body");
assertThat(writer.toString(), equalTo("Body:\n{\n \"message\": \"My message\"\n}" + LINE_SEPARATOR));
}
@Test
public void logBodyWithPrettyPrintingUsingRequestLogSpecAndObjectMappingWhenXML() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
final Greeting greeting = new Greeting();
greeting.setFirstName("John");
greeting.setLastName("Doe");
given().
contentType(ContentType.XML).
config(config().logConfig(new LogConfig(captor, true))).
log().body().
body(greeting).
when().
post("/body");
assertThat(writer.toString(), equalTo("Body:\n<greeting>\n <firstName>John</firstName>\n <lastName>Doe</lastName>\n</greeting>" + LINE_SEPARATOR));
}
@Test
public void logCookiesUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().cookies().
cookie("myCookie1", "myCookieValue1").
cookie("myCookie2", "myCookieValue2").
cookie("myMultiCookie", "myMultiCookieValue1", "myMultiCookieValue2").
when().
post("/reflect");
assertThat(writer.toString(), equalTo("Cookies:\t\tmyCookie1=myCookieValue1\n\t\t\t\tmyCookie2=myCookieValue2\n\t\t\t\tmyMultiCookie=myMultiCookieValue1\n\t\t\t\tmyMultiCookie=myMultiCookieValue2" + LINE_SEPARATOR));
}
@Test
public void logHeadersUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().headers().
header("myHeader1", "myHeaderValue1").
header("myHeader2", "myHeaderValue2").
header("myMultiHeader", "myMultiHeaderValue1", "myMultiHeaderValue2").
when().
get("/multiHeaderReflect");
assertThat(writer.toString(), equalTo("Headers:\t\tmyHeader1=myHeaderValue1\n\t\t\t\tmyHeader2=myHeaderValue2\n\t\t\t\tmyMultiHeader=myMultiHeaderValue1\n\t\t\t\tmyMultiHeader=myMultiHeaderValue2\n\t\t\t\tAccept=*/*" + LINE_SEPARATOR));
}
@Test
public void logBodyPrettyPrintedUsingRequestLogSpecWhenContentTypeDoesntMatchContent() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().everything().
contentType("application/json").
body("This is not JSON").
when().
post("/reflect");
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/json; charset="+ RestAssured.config().getEncoderConfig().defaultCharsetForContentType(ContentType.JSON)+"\nCookies:\t\t<none>\nBody:\nThis is not JSON" + LINE_SEPARATOR));
}
@Test
public void logAllWhenBasePathIsDefinedUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
RestAssured.basePath = "/reflect";
try {
given().
config(config().logConfig(new LogConfig(captor, true))).
log().all().
body("hello").
when().
post("/");
} finally {
RestAssured.reset();
}
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect/\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\nhello" + LINE_SEPARATOR));
}
@Test
public void logAllWhenBaseURIIsDefinedUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
RestAssured.baseURI = "http://localhost:8080/reflect";
try {
given().
config(config().logConfig(new LogConfig(captor, true))).
log().all().
body("hello").
when().
post("/");
} finally {
RestAssured.reset();
}
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect/\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\nhello" + LINE_SEPARATOR));
}
@Test
public void logAllWhenBasePathAndBasePortAndBaseURIIsDefinedUsingRequestLogSpec() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
RestAssured.baseURI = "http://localhost";
RestAssured.port = 8080;
RestAssured.basePath = "/reflect";
try {
given().
config(config().logConfig(new LogConfig(captor, true))).
log().all().
body("hello").
when().
post("/");
} finally {
RestAssured.reset();
}
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/reflect/\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=text/plain; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\nhello" + LINE_SEPARATOR));
}
@Test
public void logsFullyQualifiedUrlsAreLoggedCorrectly() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
config(config().logConfig(new LogConfig(captor, true))).
log().all().
filter(new Filter() {
public Response filter(FilterableRequestSpecification requestSpec, FilterableResponseSpecification responseSpec, FilterContext ctx) {
return new ResponseBuilder().setStatusCode(200).setBody("changed").build();
}
}).get("http://www.beijingchina.net.cn/transportation/train/train-to-shanghai.html");
assertThat(writer.toString(), startsWith("Request method:\tGET\nRequest URI:\thttp://www.beijingchina.net.cn/transportation/train/train-to-shanghai.html"));
}
@Test
public void logsXmlNamespacesCorrectly() throws Exception {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(logResponseTo(captor)).
expect().
statusCode(200).
when().
get("/namespace-example");
assertThat(writer.toString(), containsString("foo xmlns:ns=\"http://localhost/\">\n <bar>sudo </bar>\n <ns:bar>make me a sandwich!</ns:bar>\n </foo>"));
}
@Test
public void logsMultiPartParamsOnLogAll() throws Exception {
// Given
final byte[] bytes = IOUtils.toByteArray(getClass().getResourceAsStream("/car-records.xsd"));
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
InputStream powermock = getClass().getResourceAsStream("/powermock-easymock-junit-1.4.12.zip");
// When
given().
filter(logResponseTo(captor)).
multiPart("file", "myFile", bytes).
multiPart("something", "testing", "text/plain").
multiPart("powermock", "powermock-1.4.12", powermock).
when().
post("/multipart/file").
then().
statusCode(200).
body(is(new String(bytes)));
assertThat(writer.toString(), equalTo("HTTP/1.1 200 OK\nContent-Type: text/plain;charset=utf-8\nContent-Length: 1512\nServer: Jetty(9.3.2.v20150730)\n\n<!--\n ~ Copyright 2013 the original author or authors.\n ~\n ~ Licensed under the Apache License, Version 2.0 (the \"License\");\n ~ you may not use this file except in compliance with the License.\n ~ You may obtain a copy of the License at\n ~\n ~ http://www.apache.org/licenses/LICENSE-2.0\n ~\n ~ Unless required by applicable law or agreed to in writing, software\n ~ distributed under the License is distributed on an \"AS IS\" BASIS,\n ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n ~ See the License for the specific language governing permissions and\n ~ limitations under the License.\n -->\n<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" elementFormDefault=\"qualified\">\n <xs:element name=\"records\">\n <xs:complexType>\n <xs:sequence>\n <xs:element maxOccurs=\"unbounded\" ref=\"car\"/>\n </xs:sequence>\n </xs:complexType>\n </xs:element>\n <xs:element name=\"car\">\n <xs:complexType>\n <xs:sequence>\n <xs:element ref=\"country\"/>\n <xs:element ref=\"record\"/>\n </xs:sequence>\n <xs:attribute name=\"make\" use=\"required\" type=\"xs:NCName\"/>\n <xs:attribute name=\"name\" use=\"required\"/>\n <xs:attribute name=\"year\" use=\"required\" type=\"xs:integer\"/>\n </xs:complexType>\n </xs:element>\n <xs:element name=\"country\" type=\"xs:string\"/>\n <xs:element name=\"record\">\n <xs:complexType mixed=\"true\">\n <xs:attribute name=\"type\" use=\"required\" type=\"xs:NCName\"/>\n </xs:complexType>\n </xs:element>\n</xs:schema>"+LINE_SEPARATOR));
}
@Test public void
doesnt_include_default_charset_in_request_log_when_it_is_configured_not_to_be_added() {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(logRequestTo(captor)).
config(RestAssured.config().encoderConfig(encoderConfig().appendDefaultContentCharsetToContentTypeIfUndefined(false))).
param("foo", "bar").
contentType(ContentType.XML).
when().
post("/contentTypeAsBody").
then().
statusCode(200);
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/contentTypeAsBody\nProxy:\t\t\t<none>\nRequest params:\tfoo=bar\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/xml\nCookies:\t\t<none>\nBody:\t\t\t<none>" + LINE_SEPARATOR));
}
@Test public void
includes_default_charset_in_request_log_by_default() {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(logRequestTo(captor)).
param("foo", "bar").
contentType(ContentType.XML).
when().
post("/contentTypeAsBody").
then().
statusCode(200);
assertThat(writer.toString(), equalTo("Request method:\tPOST\nRequest URI:\thttp://localhost:8080/contentTypeAsBody\nProxy:\t\t\t<none>\nRequest params:\tfoo=bar\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/xml; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\t\t\t<none>"+LINE_SEPARATOR));
}
@Test public void
form_param_are_logged_as_query_params_for_get_requests() {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(logRequestTo(captor)).
formParam("firstName", "John").
formParam("lastName", "Doe").
when().
get("/greet").
then().
body("greeting", equalTo("Greetings John Doe"));
assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest URI:\thttp://localhost:8080/greet?firstName=John&lastName=Doe\nProxy:\t\t\t<none>\nRequest params:\t<none>\nQuery params:\t<none>\nForm params:\tfirstName=John\n\t\t\t\tlastName=Doe\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\n\t\t\t\tContent-Type=application/x-www-form-urlencoded; charset="+ RestAssured.config().getEncoderConfig().defaultContentCharset()+"\nCookies:\t\t<none>\nBody:\t\t\t<none>"+LINE_SEPARATOR));
}
@Test public void
using_log_detail_method_only_logs_the_request_method() {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(LogDetail.METHOD, captor)).
queryParam("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greet").
then().
statusCode(200);
assertThat(writer.toString(), equalTo("Request method:\tGET" + LINE_SEPARATOR));
}
@Test public void
using_log_detail_uri_only_logs_the_request_uri() {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(LogDetail.URI, captor)).
queryParam("firstName", "John").
queryParam("lastName", "Doe").
when().
get("/greet").
then().
statusCode(200);
assertThat(writer.toString(), equalTo("Request URI:\thttp://localhost:8080/greet?firstName=John&lastName=Doe" + LINE_SEPARATOR));
}
@Test public void
shows_request_log_as_url_encoded_when_explicitly_instructing_request_logging_filter_to_do_so() throws UnsupportedEncodingException {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(LogDetail.URI, true, captor, true)).
queryParam("firstName", "John#€").
queryParam("lastName", "Doe").
when().
get("/greet").
then().
statusCode(200);
assertThat(writer.toString(), equalTo("Request URI:\thttp://localhost:8080/greet?firstName=John" + URLEncoder.encode("#€", "UTF-8") + "&lastName=Doe" + LINE_SEPARATOR));
}
@Test public void
shows_request_log_as_without_url_encoding_when_explicitly_instructing_request_logging_filter_to_do_so() throws UnsupportedEncodingException {
final StringWriter writer = new StringWriter();
final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);
given().
filter(new RequestLoggingFilter(LogDetail.URI, true, captor, false)).
queryParam("firstName", "John#€").
queryParam("lastName", "Doe").
when().
get("/greet").
then().
statusCode(200);
assertThat(writer.toString(), equalTo("Request URI:\thttp://localhost:8080/greet?firstName=John#€&lastName=Doe" + LINE_SEPARATOR));
}
} | Added test for verify that issue 684 is resolved
| examples/rest-assured-itest-java/src/test/java/io/restassured/itest/java/LoggingITest.java | Added test for verify that issue 684 is resolved |
|
Java | apache-2.0 | 2e981e03a3a995a3d616688f2ec07f8506f3d56b | 0 | ferstl/depgraph-maven-plugin | /*
* Copyright (c) 2014 - 2016 by Stefan Ferstl <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.ferstl.depgraph.graph;
import java.util.Set;
import org.apache.maven.artifact.Artifact;
import com.github.ferstl.depgraph.dot.AttributeBuilder;
import com.github.ferstl.depgraph.dot.NodeAttributeRenderer;
import com.github.ferstl.depgraph.graph.style.StyleConfiguration;
import com.google.common.base.Joiner;
import static com.google.common.collect.Iterables.getFirst;
public class DependencyNodeLabelRenderer implements NodeAttributeRenderer<GraphNode> {
private static final Joiner SLASH_JOINER = Joiner.on("/").skipNulls();
private final boolean showGroupId;
private final boolean showArtifactId;
private final boolean showVersion;
private final StyleConfiguration styleConfiguration;
public DependencyNodeLabelRenderer(boolean showGroupId, boolean showArtifactId, boolean showVersion, StyleConfiguration styleConfiguration) {
this.showGroupId = showGroupId;
this.showArtifactId = showArtifactId;
this.showVersion = showVersion;
this.styleConfiguration = styleConfiguration;
}
@Override
public AttributeBuilder createNodeAttributes(GraphNode node) {
Artifact artifact = node.getArtifact();
String scopes = createScopeString(node.getScopes());
return this.styleConfiguration.nodeAttributes(
this.showGroupId ? artifact.getGroupId() : null,
this.showArtifactId ? artifact.getArtifactId() : null,
this.showVersion ? artifact.getVersion() : null,
artifact.getType(),
scopes,
getFirst(node.getScopes(), null));
}
private static String createScopeString(Set<String> scopes) {
if (scopes.size() > 1 || !scopes.contains("compile")) {
return "(" + SLASH_JOINER.join(scopes) + ")";
}
return "";
}
}
| src/main/java/com/github/ferstl/depgraph/graph/DependencyNodeLabelRenderer.java | /*
* Copyright (c) 2014 - 2016 by Stefan Ferstl <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.ferstl.depgraph.graph;
import java.util.Set;
import org.apache.maven.artifact.Artifact;
import com.github.ferstl.depgraph.dot.AttributeBuilder;
import com.github.ferstl.depgraph.dot.NodeAttributeRenderer;
import com.github.ferstl.depgraph.graph.style.StyleConfiguration;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
public class DependencyNodeLabelRenderer implements NodeAttributeRenderer<GraphNode> {
private static final Joiner SLASH_JOINER = Joiner.on("/").skipNulls();
private final boolean showGroupId;
private final boolean showArtifactId;
private final boolean showVersion;
private final StyleConfiguration styleConfiguration;
public DependencyNodeLabelRenderer(boolean showGroupId, boolean showArtifactId, boolean showVersion, StyleConfiguration styleConfiguration) {
this.showGroupId = showGroupId;
this.showArtifactId = showArtifactId;
this.showVersion = showVersion;
this.styleConfiguration = styleConfiguration;
}
@Override
public AttributeBuilder createNodeAttributes(GraphNode node) {
Artifact artifact = node.getArtifact();
String scopes = createScopeString(node.getScopes());
return this.styleConfiguration.nodeAttributes(
this.showGroupId ? artifact.getGroupId() : null,
this.showArtifactId ? artifact.getArtifactId() : null,
this.showVersion ? artifact.getVersion() : null,
artifact.getType(),
scopes, Iterables.getFirst(node.getScopes(), null));
}
private static String createScopeString(Set<String> scopes) {
if (scopes.size() > 1 || !scopes.contains("compile")) {
return "(" + SLASH_JOINER.join(scopes) + ")";
}
return "";
}
}
| Cosmetics | src/main/java/com/github/ferstl/depgraph/graph/DependencyNodeLabelRenderer.java | Cosmetics |
|
Java | apache-2.0 | 1cbae831cbd427787ef3802d3dde04669bac163b | 0 | AntonKondratkov/akondratkov,AntonKondratkov/akondratkov,AntonKondratkov/akondratkov | chapter_001/src/main/java/ru/job4j/calculator/CalculatorTest.java | package ru.job4j.calculator;
import org.junit.Test;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
public class CalculatorTest {
@Test
public void whenAddOnePlusOneThenTwo() {
Calculator calc = new Calculator();
calc.add(1D, 1D);
double result = calc.getResult();
double expected = 2D;
assertThat(result, is(expected));
}
} | Removed file CalculatorTest from repository
| chapter_001/src/main/java/ru/job4j/calculator/CalculatorTest.java | Removed file CalculatorTest from repository |
||
Java | apache-2.0 | ef30c88863abadcd1c193351dab73024ae81ecb0 | 0 | jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2 | src/foam/blob/HttpServletRequestBlob.java | /**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.blob;
import javax.servlet.http.HttpServletRequest;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
public class HttpServletRequestBlob
extends foam.blob.AbstractBlob
{
protected long size_;
protected long pos_ = 0;
protected BufferedInputStream reader_;
public HttpServletRequestBlob(HttpServletRequest request) throws IOException {
this.reader_ = new BufferedInputStream(request.getInputStream());
if ( request.getContentLength() == -1 ) {
throw new RuntimeException("Invalid content length");
}
this.size_ = request.getContentLength();
}
@Override
public Buffer read(Buffer buffer, long offset) {
try {
if ( offset != pos_ ) {
throw new RuntimeException("Offset does not match stream position");
}
int outOffset = 0;
long length = Math.min(buffer.getLength(), getSize() - offset);
if (length < buffer.getLength()) {
buffer = buffer.slice(0, length);
}
ByteBuffer bb = buffer.getData();
byte[] buf = new byte[(int) length];
while ( outOffset < length ) {
int bytesRead = reader_.read(buf, outOffset, (int) length);
bb.put(buf, outOffset, bytesRead);
outOffset += bytesRead;
pos_ += bytesRead;
}
bb.rewind();
buffer.setData(bb);
return buffer;
} catch (Throwable t) {
t.printStackTrace();
return null;
}
}
@Override
public long getSize() {
return this.size_;
}
} | Deleted HttpServletRequestBlob
| src/foam/blob/HttpServletRequestBlob.java | Deleted HttpServletRequestBlob |
||
Java | mit | eb0333dd3a344eb6e4bd30cbc03236f01ddf1919 | 0 | PLOS/wombat,PLOS/wombat,PLOS/wombat,PLOS/wombat | package org.ambraproject.wombat.service.remote;
import com.google.common.collect.ImmutableList;
import org.ambraproject.wombat.service.ApiAddress;
import org.ambraproject.wombat.util.CacheKey;
import org.apache.http.Header;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.Type;
import java.net.URI;
import java.net.URL;
import java.util.function.Function;
/**
* Base implementation for {@link RestfulJsonApi}. Details about the service address, caching, authentication headers,
* etc., are injected by subclasses.
*/
abstract class AbstractRestfulJsonApi implements RestfulJsonApi {

  // Serializes request bodies to JSON and deserializes JSON responses.
  @Autowired
  protected JsonService jsonService;
  // Executes remote requests whose responses are consumed as raw byte streams.
  @Autowired
  protected CachedRemoteService<InputStream> cachedRemoteStreamer;
  // Executes remote requests whose responses are consumed as character streams.
  @Autowired
  protected CachedRemoteService<Reader> cachedRemoteReader;

  /**
   * @return the base URL to which request addresses will be appended
   */
  protected abstract URL getServerUrl();

  /**
   * @return a string, which is constant and unique for each service, to identify cached values the service
   */
  protected abstract String getCachePrefix();

  /**
   * @return headers to add to every outgoing request to the service
   */
  protected Iterable<? extends Header> getAdditionalHeaders() {
    return ImmutableList.of();
  }

  /** A deferred remote call that may fail with an {@link IOException}. */
  @FunctionalInterface
  protected static interface RemoteRequest<T> {
    T execute() throws IOException;
  }

  /**
   * Execute a remote request that has been set up by another method. Every invocation by this class to {@link
   * #cachedRemoteStreamer} and {@link #cachedRemoteReader} is wrapped in a call to {@link #makeRemoteRequest}.
   * <p>
   * Subclasses may override this method to add special exception handling. Each override must make a {@code super}
   * call.
   */
  protected <T> T makeRemoteRequest(RemoteRequest<T> requestAction) throws IOException {
    return requestAction.execute();
  }

  /** Sends a GET to the given address and returns the raw response body as a byte stream. */
  @Override
  public final InputStream requestStream(ApiAddress address) throws IOException {
    return makeRemoteRequest(() -> cachedRemoteStreamer.request(buildGet(address)));
  }

  /** Sends a GET to the given address and returns the response body as a character stream. */
  @Override
  public final Reader requestReader(ApiAddress address) throws IOException {
    return makeRemoteRequest(() -> cachedRemoteReader.request(buildGet(address)));
  }

  /**
   * Sends a GET to the given address and deserializes the JSON response into the given type.
   * The response is cached under a key built from {@link #getCachePrefix()} and the address.
   */
  @Override
  public final <T> T requestObject(ApiAddress address, Type responseType) throws IOException {
    CacheKey cacheKey = CacheKey.create(getCachePrefix(), address.getAddress());
    // Just try to cache everything. We may want to narrow this in the future.
    return requestCachedObject(cacheKey, address, responseType);
  }

  /** Convenience overload of {@link #requestObject(ApiAddress, Type)} for class literals. */
  @Override
  public final <T> T requestObject(ApiAddress address, Class<T> responseClass) throws IOException {
    return requestObject(address, (Type) responseClass);
  }

  /**
   * Serializes {@code object} (if non-null) to JSON and sends it in the body of a request
   * built by {@code requestConstructor} (i.e., POST or PUT).
   * <p>
   * Note: the returned response has already been closed by the try-with-resources block,
   * so its status line and headers are readable but its entity stream is not.
   */
  private <R extends HttpUriRequest & HttpEntityEnclosingRequest>
  HttpResponse uploadObject(ApiAddress address, Object object, Function<URI, R> requestConstructor)
      throws IOException {
    R request = buildRequest(address, requestConstructor);

    // APPLICATION_JSON carries charset=UTF-8; it is used both for the entity
    // encoding and for the Content-Type header below.
    ContentType contentType = ContentType.APPLICATION_JSON;
    if (object != null) {
      String json = jsonService.serialize(object);
      request.setEntity(new StringEntity(json, contentType));
    }
    request.addHeader(HttpHeaders.CONTENT_TYPE, contentType.toString());
    try (CloseableHttpResponse response = cachedRemoteReader.getResponse(request)) {
      //return closed response
      return response;
    }
  }

  /** POSTs {@code object} as JSON to the given address. Returns the (closed) response. */
  @Override
  public final HttpResponse postObject(ApiAddress address, Object object) throws IOException {
    return uploadObject(address, object, HttpPost::new);
  }

  /** PUTs {@code object} as JSON to the given address, discarding the response. */
  @Override
  public final void putObject(ApiAddress address, Object object) throws IOException {
    uploadObject(address, object, HttpPut::new);
  }

  /** Sends a DELETE to the given address, discarding the response. */
  @Override
  public final void deleteObject(ApiAddress address) throws IOException {
    HttpDelete delete = buildRequest(address, HttpDelete::new);
    try (CloseableHttpResponse ignored = cachedRemoteReader.getResponse(delete)) {
      // NOTE(review): explicit close() inside try-with-resources is redundant
      // but harmless (CloseableHttpResponse.close() is idempotent in HttpClient).
      ignored.close();
    }
  }

  /** Executes an arbitrary request against the service; the caller must close the response. */
  @Override
  public final CloseableHttpResponse getResponse(HttpUriRequest target) throws IOException {
    return makeRemoteRequest(() -> cachedRemoteReader.getResponse(target));
  }

  /** GETs the address, caching under {@code cacheKey}, and applies {@code callback} to the byte stream. */
  @Override
  public final <T> T requestCachedStream(CacheKey cacheKey, ApiAddress address,
      CacheDeserializer<InputStream, T> callback) throws IOException {
    return makeRemoteRequest(() -> cachedRemoteStreamer.requestCached(cacheKey, buildGet(address), callback));
  }

  /** GETs the address, caching under {@code cacheKey}, and applies {@code callback} to the character stream. */
  @Override
  public final <T> T requestCachedReader(CacheKey cacheKey, ApiAddress address,
      CacheDeserializer<Reader, T> callback) throws IOException {
    return makeRemoteRequest(() -> cachedRemoteReader.requestCached(cacheKey, buildGet(address), callback));
  }

  /** GETs the address, caching under {@code cacheKey}, and deserializes the JSON response into {@code responseType}. */
  @Override
  public final <T> T requestCachedObject(CacheKey cacheKey, ApiAddress address, Type responseType) throws IOException {
    return makeRemoteRequest(() ->
        (T) jsonService.requestCachedObject(cachedRemoteReader, cacheKey, buildGet(address), responseType));
  }

  /** Convenience overload of {@link #requestCachedObject(CacheKey, ApiAddress, Type)} for class literals. */
  @Override
  public final <T> T requestCachedObject(CacheKey cacheKey, ApiAddress address, Class<T> responseClass) throws IOException {
    return requestCachedObject(cacheKey, address, (Type) responseClass);
  }

  /** Builds a GET request for the given address with all service headers attached. */
  protected final HttpGet buildGet(ApiAddress address) {
    return buildRequest(address, HttpGet::new);
  }

  /** Resolves the address against the service's base URL and attaches {@link #getAdditionalHeaders()}. */
  private <R extends HttpUriRequest> R buildRequest(ApiAddress address, Function<URI, R> requestConstructor) {
    URI uri = address.buildUri(this.getServerUrl());
    R request = requestConstructor.apply(uri);
    for (Header header : getAdditionalHeaders()) {
      request.addHeader(header);
    }
    return request;
  }
}
| src/main/java/org/ambraproject/wombat/service/remote/AbstractRestfulJsonApi.java | package org.ambraproject.wombat.service.remote;
import com.google.common.collect.ImmutableList;
import org.ambraproject.wombat.service.ApiAddress;
import org.ambraproject.wombat.util.CacheKey;
import org.apache.http.Header;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import java.net.URI;
import java.net.URL;
import java.util.function.Function;
/**
* Base implementation for {@link RestfulJsonApi}. Details about the service address, caching, authentication headers,
* etc., are injected by subclasses.
*/
abstract class AbstractRestfulJsonApi implements RestfulJsonApi {
@Autowired
protected JsonService jsonService;
@Autowired
protected CachedRemoteService<InputStream> cachedRemoteStreamer;
@Autowired
protected CachedRemoteService<Reader> cachedRemoteReader;
/**
* @return the base URL to which request addresses will be appended
*/
protected abstract URL getServerUrl();
/**
* @return a string, which is constant and unique for each service, to identify cached values the service
*/
protected abstract String getCachePrefix();
/**
* @return headers to add to every outgoing request to the service
*/
protected Iterable<? extends Header> getAdditionalHeaders() {
return ImmutableList.of();
}
@FunctionalInterface
protected static interface RemoteRequest<T> {
T execute() throws IOException;
}
/**
* Execute a remote request that has been set up by another method. Every invocation by this class to {@link
* #cachedRemoteStreamer} and {@link #cachedRemoteReader} is wrapped in a call to {@link #makeRemoteRequest}.
* <p>
* Subclasses may override this method to add special exception handling. Each override must make a {@code super}
* call.
*/
protected <T> T makeRemoteRequest(RemoteRequest<T> requestAction) throws IOException {
return requestAction.execute();
}
@Override
public final InputStream requestStream(ApiAddress address) throws IOException {
return makeRemoteRequest(() -> cachedRemoteStreamer.request(buildGet(address)));
}
@Override
public final Reader requestReader(ApiAddress address) throws IOException {
return makeRemoteRequest(() -> cachedRemoteReader.request(buildGet(address)));
}
@Override
public final <T> T requestObject(ApiAddress address, Type responseType) throws IOException {
CacheKey cacheKey = CacheKey.create(getCachePrefix(), address.getAddress());
// Just try to cache everything. We may want to narrow this in the future.
return requestCachedObject(cacheKey, address, responseType);
}
@Override
public final <T> T requestObject(ApiAddress address, Class<T> responseClass) throws IOException {
return requestObject(address, (Type) responseClass);
}
private static final String APPLICATION_JSON_CONTENT_TYPE = ContentType.APPLICATION_JSON.toString();
private <R extends HttpUriRequest & HttpEntityEnclosingRequest>
HttpResponse uploadObject(ApiAddress address, Object object, Function<URI, R> requestConstructor)
throws IOException {
R request = buildRequest(address, requestConstructor);
if (object != null) {
String json = jsonService.serialize(object);
try {
request.setEntity(new StringEntity(json));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
request.addHeader(HttpHeaders.CONTENT_TYPE, APPLICATION_JSON_CONTENT_TYPE);
try (CloseableHttpResponse response = cachedRemoteReader.getResponse(request)) {
//return closed response
return response;
}
}
@Override
public final HttpResponse postObject(ApiAddress address, Object object) throws IOException {
return uploadObject(address, object, HttpPost::new);
}
@Override
public final void putObject(ApiAddress address, Object object) throws IOException {
uploadObject(address, object, HttpPut::new);
}
@Override
public final void deleteObject(ApiAddress address) throws IOException {
HttpDelete delete = buildRequest(address, HttpDelete::new);
try (CloseableHttpResponse ignored = cachedRemoteReader.getResponse(delete)) {
ignored.close();
}
}
@Override
public final CloseableHttpResponse getResponse(HttpUriRequest target) throws IOException {
return makeRemoteRequest(() -> cachedRemoteReader.getResponse(target));
}
@Override
public final <T> T requestCachedStream(CacheKey cacheKey, ApiAddress address,
CacheDeserializer<InputStream, T> callback) throws IOException {
return makeRemoteRequest(() -> cachedRemoteStreamer.requestCached(cacheKey, buildGet(address), callback));
}
@Override
public final <T> T requestCachedReader(CacheKey cacheKey, ApiAddress address,
CacheDeserializer<Reader, T> callback) throws IOException {
return makeRemoteRequest(() -> cachedRemoteReader.requestCached(cacheKey, buildGet(address), callback));
}
@Override
public final <T> T requestCachedObject(CacheKey cacheKey, ApiAddress address, Type responseType) throws IOException {
return makeRemoteRequest(() ->
(T) jsonService.requestCachedObject(cachedRemoteReader, cacheKey, buildGet(address), responseType));
}
@Override
public final <T> T requestCachedObject(CacheKey cacheKey, ApiAddress address, Class<T> responseClass) throws IOException {
return requestCachedObject(cacheKey, address, (Type) responseClass);
}
protected final HttpGet buildGet(ApiAddress address) {
return buildRequest(address, HttpGet::new);
}
private <R extends HttpUriRequest> R buildRequest(ApiAddress address, Function<URI, R> requestConstructor) {
URI uri = address.buildUri(this.getServerUrl());
R request = requestConstructor.apply(uri);
for (Header header : getAdditionalHeaders()) {
request.addHeader(header);
}
return request;
}
}
| DPRO-3100: set content type in json POST requests
| src/main/java/org/ambraproject/wombat/service/remote/AbstractRestfulJsonApi.java | DPRO-3100: set content type in json POST requests |
|
Java | mit | df41fbe00b1300cfe5076bb4e9242214fbe677f9 | 0 | stefanbirkner/mockito,Jazzepi/mockito,LilaQin/mockito,ze-pequeno/mockito,Ariel-Isaacm/mockito,MuShiiii/mockito,GeeChao/mockito,icefoggy/mockito,alberskib/mockito,LilaQin/mockito,stefanbirkner/mockito,diboy2/mockito,ignaciotcrespo/mockito,rototor/mockito,hansjoachim/mockito,windofthesky/mockito,geoffschoeman/mockito,mockito/mockito,JeremybellEU/mockito,TimvdLippe/mockito,ignaciotcrespo/mockito,alberskib/mockito,Jam71/mockito,huangyingw/mockito,windofthesky/mockito,lukasz-szewc/mockito,Jam71/mockito,mohanaraosv/mockito,terebesirobert/mockito,geoffschoeman/mockito,hansjoachim/mockito,MuShiiii/mockito,ze-pequeno/mockito,icefoggy/mockito,lukasz-szewc/mockito,bric3/mockito,bric3/mockito,zorosteven/mockito,mbrukman/mockito,diboy2/mockito,JeremybellEU/mockito,TimvdLippe/mockito,mohanaraosv/mockito,mbrukman/mockito,Jazzepi/mockito,mkordas/mockito,Ariel-Isaacm/mockito,smarkwell/mockito,zorosteven/mockito,smarkwell/mockito,mkordas/mockito,GeeChao/mockito,rototor/mockito,mockito/mockito,bric3/mockito,mockito/mockito | /*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito;
import org.mockito.internal.MockitoCore;
import org.mockito.internal.creation.MockSettingsImpl;
import org.mockito.internal.debugging.MockitoDebuggerImpl;
import org.mockito.internal.stubbing.answers.AnswerReturnValuesAdapter;
import org.mockito.internal.stubbing.answers.CallsRealMethods;
import org.mockito.internal.stubbing.answers.DoesNothing;
import org.mockito.internal.stubbing.answers.Returns;
import org.mockito.internal.stubbing.answers.ThrowsException;
import org.mockito.internal.stubbing.defaultanswers.ReturnsEmptyValues;
import org.mockito.internal.stubbing.defaultanswers.ReturnsMoreEmptyValues;
import org.mockito.internal.verification.VerificationModeFactory;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.DeprecatedOngoingStubbing;
import org.mockito.stubbing.OngoingStubbing;
import org.mockito.stubbing.Stubber;
import org.mockito.stubbing.VoidMethodStubbable;
import org.mockito.verification.VerificationWithTimeout;
import org.mockito.verification.Timeout;
import org.mockito.verification.VerificationMode;
/**
* <p align="left"><img src="logo.jpg"/></p>
* Mockito library enables mocks creation, verification and stubbing.
* <p>
* This javadoc content is also available on the <a href="http://mockito.org">http://mockito.org</a> web page.
* All documentation is kept in javadocs because it guarantees consistency between what's on the web and what's in the source code.
* Also, it makes possible to access documentation straight from the IDE even if you work offline.
*
* <h1>Contents</h1>
*
* <b>
* <a href="#1">1. Let's verify some behaviour! </a><br/>
* <a href="#2">2. How about some stubbing? </a><br/>
* <a href="#3">3. Argument matchers </a><br/>
* <a href="#4">4. Verifying exact number of invocations / at least once / never </a><br/>
* <a href="#5">5. Stubbing void methods with exceptions </a><br/>
* <a href="#6">6. Verification in order </a><br/>
* <a href="#7">7. Making sure interaction(s) never happened on mock </a><br/>
* <a href="#8">8. Finding redundant invocations </a><br/>
* <a href="#9">9. Shorthand for mocks creation - @Mock annotation </a><br/>
* <a href="#10">10. Stubbing consecutive calls (iterator-style stubbing) </a><br/>
* <a href="#11">11. Stubbing with callbacks </a><br/>
* <a href="#12">12. doThrow()|doAnswer()|doNothing()|doReturn() family of methods mostly for stubbing voids </a><br/>
* <a href="#13">13. Spying on real objects </a><br/>
* <a href="#14">14. Changing default return values of unstubbed invocations (Since 1.7) </a><br/>
* <a href="#15">15. Capturing arguments for further assertions (Since 1.8.0) </a><br/>
* <a href="#16">16. Real partial mocks (Since 1.8.0) </a><br/>
* <a href="#17">17. Resetting mocks (Since 1.8.0) </a><br/>
* <a href="#18">18. Troubleshooting & validating framework usage (Since 1.8.0) </a><br/>
* <a href="#19">19. Aliases for behavior driven development (Since 1.8.0) </a><br/>
* <a href="#20">20. Serializable mocks (Since 1.8.1) </a><br/>
* <a href="#21">21. New annotations: @Captor, @Spy, @InjectMocks (Since 1.8.3) </a><br/>
* <a href="#22">22. (**New**) Verification with timeout (Since 1.8.5) </a><br/>
* </b>
*
* <p>
* Following examples mock a List, because everyone knows its interface (methods
* like add(), get(), clear() will be used). <br>
* You probably wouldn't mock List class 'in real'.
*
* <h3 id="1">1. Let's verify some behaviour!</h3>
*
* <pre>
* //Let's import Mockito statically so that the code looks clearer
* import static org.mockito.Mockito.*;
*
* //mock creation
* List mockedList = mock(List.class);
*
* //using mock object
* mockedList.add("one");
* mockedList.clear();
*
* //verification
* verify(mockedList).add("one");
* verify(mockedList).clear();
* </pre>
*
* <p>
* Once created, mock will remember all interactions. Then you can selectively
* verify whatever interaction you are interested in.
*
* <h3 id="2">2. How about some stubbing?</h3>
*
* <pre>
* //You can mock concrete classes, not only interfaces
* LinkedList mockedList = mock(LinkedList.class);
*
* //stubbing
* when(mockedList.get(0)).thenReturn("first");
* when(mockedList.get(1)).thenThrow(new RuntimeException());
*
* //following prints "first"
* System.out.println(mockedList.get(0));
*
* //following throws runtime exception
* System.out.println(mockedList.get(1));
*
* //following prints "null" because get(999) was not stubbed
* System.out.println(mockedList.get(999));
*
* //Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>
* //If your code cares what get(0) returns then something else breaks (often before even verify() gets executed).
* //If your code doesn't care what get(0) returns then it should not be stubbed. Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
* verify(mockedList).get(0);
* </pre>
*
* <ul>
* <li> By default, for all methods that return value, mock returns null, an
* empty collection or appropriate primitive/primitive wrapper value (e.g: 0,
* false, ... for int/Integer, boolean/Boolean, ...). </li>
*
* <li> Stubbing can be overridden: for example common stubbing can go to
* fixture setup but the test methods can override it.
 * Please note that overriding stubbing is a potential code smell that points out too much stubbing</li>
*
* <li> Once stubbed, the method will always return stubbed value regardless
* of how many times it is called. </li>
*
* <li> Last stubbing is more important - when you stubbed the same method with
* the same arguments many times. </li>
*
* </ul>
*
* <h3 id="3">3. Argument matchers</h3>
*
* Mockito verifies argument values in natural java style: by using an equals() method.
* Sometimes, when extra flexibility is required then you might use argument matchers:
*
* <pre>
* //stubbing using built-in anyInt() argument matcher
* when(mockedList.get(anyInt())).thenReturn("element");
*
* //stubbing using hamcrest (let's say isValid() returns your own hamcrest matcher):
* when(mockedList.contains(argThat(isValid()))).thenReturn("element");
*
* //following prints "element"
* System.out.println(mockedList.get(999));
*
* //<b>you can also verify using an argument matcher</b>
* verify(mockedList).get(anyInt());
* </pre>
*
* <p>
* Argument matchers allow flexible verification or stubbing.
* {@link Matchers Click here to see} more built-in matchers
* and examples of <b>custom argument matchers / hamcrest matchers</b>.
* <p>
* For information solely on <b>custom argument matchers</b> check out javadoc for {@link ArgumentMatcher} class.
* <p>
* Be reasonable with using complicated argument matching.
 * The natural matching style using equals() with occasional anyX() matchers tends to give clean & simple tests.
* Sometimes it's just better to refactor the code to allow equals() matching or even implement equals() method to help out with testing.
* <p>
* Also, read <a href="#15">section 15</a> or javadoc for {@link ArgumentCaptor} class.
* {@link ArgumentCaptor} is a special implementation of an argument matcher that captures argument values for further assertions.
* <p>
* <b>Warning on argument matchers:</b>
* <p>
* If you are using argument matchers, <b>all arguments</b> have to be provided
* by matchers.
* <p>
* E.g: (example shows verification but the same applies to stubbing):
*
* <pre>
* verify(mock).someMethod(anyInt(), anyString(), <b>eq("third argument")</b>);
* //above is correct - eq() is also an argument matcher
*
* verify(mock).someMethod(anyInt(), anyString(), <b>"third argument"</b>);
* //above is incorrect - exception will be thrown because third argument is given without an argument matcher.
* </pre>
*
* <p>
* Matcher methods like anyObject(), eq() <b>do not</b> return matchers.
* Internally, they record a matcher on a stack and return a dummy value (usually null).
 * This implementation is due to the static type safety imposed by the java compiler.
* The consequence is that you cannot use anyObject(), eq() methods outside of verified/stubbed method.
*
* <h3 id="4">4. Verifying exact number of invocations / at least x / never</h3>
*
* <pre>
* //using mock
* mockedList.add("once");
*
* mockedList.add("twice");
* mockedList.add("twice");
*
* mockedList.add("three times");
* mockedList.add("three times");
* mockedList.add("three times");
*
* //following two verifications work exactly the same - times(1) is used by default
* verify(mockedList).add("once");
* verify(mockedList, times(1)).add("once");
*
* //exact number of invocations verification
* verify(mockedList, times(2)).add("twice");
* verify(mockedList, times(3)).add("three times");
*
* //verification using never(). never() is an alias to times(0)
* verify(mockedList, never()).add("never happened");
*
* //verification using atLeast()/atMost()
* verify(mockedList, atLeastOnce()).add("three times");
* verify(mockedList, atLeast(2)).add("five times");
* verify(mockedList, atMost(5)).add("three times");
*
* </pre>
*
* <p>
* <b>times(1) is the default.</b> Therefore using times(1) explicitly can be
* omitted.
*
* <h3 id="5">5. Stubbing void methods with exceptions</h3>
*
* <pre>
* doThrow(new RuntimeException()).when(mockedList).clear();
*
* //following throws RuntimeException:
* mockedList.clear();
* </pre>
*
* Read more about doThrow|doAnswer family of methods in paragraph 12.
* <p>
* Initially, {@link Mockito#stubVoid(Object)} was used for stubbing voids.
* Currently stubVoid() is deprecated in favor of {@link Mockito#doThrow(Throwable)}.
* This is because of improved readability and consistency with the family of {@link Mockito#doAnswer(Answer)} methods.
*
* <h3 id="6">6. Verification in order</h3>
*
* <pre>
* // A. Single mock whose methods must be invoked in a particular order
* List singleMock = mock(List.class);
*
* //using a single mock
* singleMock.add("was added first");
* singleMock.add("was added second");
*
* //create an inOrder verifier for a single mock
* InOrder inOrder = inOrder(singleMock);
*
* //following will make sure that add is first called with "was added first, then with "was added second"
* inOrder.verify(singleMock).add("was added first");
* inOrder.verify(singleMock).add("was added second");
*
* // B. Multiple mocks that must be used in a particular order
* List firstMock = mock(List.class);
* List secondMock = mock(List.class);
*
* //using mocks
* firstMock.add("was called first");
* secondMock.add("was called second");
*
* //create inOrder object passing any mocks that need to be verified in order
* InOrder inOrder = inOrder(firstMock, secondMock);
*
* //following will make sure that firstMock was called before secondMock
* inOrder.verify(firstMock).add("was called first");
* inOrder.verify(secondMock).add("was called second");
*
* // Oh, and A + B can be mixed together at will
* </pre>
*
* Verification in order is flexible - <b>you don't have to verify all
* interactions</b> one-by-one but only those that you are interested in
* testing in order.
* <p>
* Also, you can create InOrder object passing only mocks that are relevant for
* in-order verification.
*
* <h3 id="7">7. Making sure interaction(s) never happened on mock</h3>
*
* <pre>
* //using mocks - only mockOne is interacted
* mockOne.add("one");
*
* //ordinary verification
* verify(mockOne).add("one");
*
* //verify that method was never called on a mock
* verify(mockOne, never()).add("two");
*
* //verify that other mocks were not interacted
* verifyZeroInteractions(mockTwo, mockThree);
*
* </pre>
*
* <h3 id="8">8. Finding redundant invocations</h3>
*
* <pre>
* //using mocks
* mockedList.add("one");
* mockedList.add("two");
*
* verify(mockedList).add("one");
*
* //following verification will fail
* verifyNoMoreInteractions(mockedList);
* </pre>
*
* A word of <b>warning</b>:
* Some users who did a lot of classic, expect-run-verify mocking tend to use verifyNoMoreInteractions() very often, even in every test method.
* verifyNoMoreInteractions() is not recommended to use in every test method.
* verifyNoMoreInteractions() is a handy assertion from the interaction testing toolkit. Use it only when it's relevant.
* Abusing it leads to overspecified, less maintainable tests. You can find further reading
* <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>.
*
* <p>
* See also {@link Mockito#never()} - it is more explicit and
* communicates the intent well.
* <p>
*
* <h3 id="9">9. Shorthand for mocks creation - @Mock annotation</h3>
*
* <ul>
* <li>Minimizes repetitive mock creation code.</li>
* <li>Makes the test class more readable.</li>
* <li>Makes the verification error easier to read because the <b>field name</b>
* is used to identify the mock.</li>
* </ul>
*
* <pre>
* public class ArticleManagerTest {
*
* @Mock private ArticleCalculator calculator;
* @Mock private ArticleDatabase database;
* @Mock private UserProvider userProvider;
*
* private ArticleManager manager;
* </pre>
*
* <b>Important!</b> This needs to be somewhere in the base class or a test
* runner:
*
* <pre>
* MockitoAnnotations.initMocks(testClass);
* </pre>
*
* You can use built-in runner: {@link MockitoJUnitRunner}.
* <p>
* Read more here: {@link MockitoAnnotations}
*
* <h3 id="10"> 10. Stubbing consecutive calls (iterator-style stubbing)</h3>
*
* Sometimes we need to stub with different return value/exception for the same
* method call. Typical use case could be mocking iterators.
* Original version of Mockito did not have this feature to promote simple mocking.
* For example, instead of iterators one could use {@link Iterable} or simply
* collections. Those offer natural ways of stubbing (e.g. using real
* collections). In rare scenarios stubbing consecutive calls could be useful,
* though:
* <p>
*
* <pre>
* when(mock.someMethod("some arg"))
* .thenThrow(new RuntimeException())
* .thenReturn("foo");
*
* //First call: throws runtime exception:
* mock.someMethod("some arg");
*
* //Second call: prints "foo"
* System.out.println(mock.someMethod("some arg"));
*
* //Any consecutive call: prints "foo" as well (last stubbing wins).
* System.out.println(mock.someMethod("some arg"));
* </pre>
*
* Alternative, shorter version of consecutive stubbing:
*
* <pre>
* when(mock.someMethod("some arg"))
* .thenReturn("one", "two", "three");
* </pre>
*
* <h3 id="11"> 11. Stubbing with callbacks</h3>
*
* Allows stubbing with generic {@link Answer} interface.
* <p>
* Yet another controversial feature which was not included in Mockito
* originally. We recommend using simple stubbing with thenReturn() or
* thenThrow() only. Those two should be <b>just enough</b> to test/test-drive
* any clean & simple code.
*
* <pre>
* when(mock.someMethod(anyString())).thenAnswer(new Answer() {
* Object answer(InvocationOnMock invocation) {
* Object[] args = invocation.getArguments();
* Object mock = invocation.getMock();
* return "called with arguments: " + args;
* }
* });
*
* //Following prints "called with arguments: foo"
* System.out.println(mock.someMethod("foo"));
* </pre>
*
* <h3 id="12"> 12. doThrow()|doAnswer()|doNothing()|doReturn() family of methods for stubbing voids (mostly)</h3>
*
* Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets...
* <p>
* {@link Mockito#doThrow(Throwable)} replaces the {@link Mockito#stubVoid(Object)} method for stubbing voids.
* The main reason is improved readability and consistency with the family of doAnswer() methods.
* <p>
* Use doThrow() when you want to stub a void method with an exception:
* <pre>
* doThrow(new RuntimeException()).when(mockedList).clear();
*
* //following throws RuntimeException:
* mockedList.clear();
* </pre>
*
* Read more about other methods:
* <p>
* {@link Mockito#doThrow(Throwable)}
* <p>
* {@link Mockito#doAnswer(Answer)}
* <p>
* {@link Mockito#doNothing()}
* <p>
* {@link Mockito#doReturn(Object)}
*
* <h3 id="13"> 13. Spying on real objects</h3>
*
* You can create spies of real objects. When you use the spy then the <b>real</b> methods are called (unless a method was stubbed).
* <p>
* Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code.
*
* <p>
* Spying on real objects can be associated with "partial mocking" concept.
* <b>Before the release 1.8</b>, Mockito spies were not real partial mocks.
* The reason was we thought partial mock is a code smell.
* At some point we found legitimate use cases for partial mocks
* (3rd party interfaces, interim refactoring of legacy code, the full article is <a href=
* "http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring"
* >here</a>)
* <p>
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //optionally, you can stub out some methods:
* when(spy.size()).thenReturn(100);
*
* //using the spy calls <b>real</b> methods
* spy.add("one");
* spy.add("two");
*
* //prints "one" - the first element of a list
* System.out.println(spy.get(0));
*
* //size() method was stubbed - 100 is printed
* System.out.println(spy.size());
*
* //optionally, you can verify
* verify(spy).add("one");
* verify(spy).add("two");
* </pre>
*
* <h4>Important gotcha on spying real objects!</h4>
*
* 1. Sometimes it's impossible to use {@link Mockito#when(Object)} for stubbing spies. Example:
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(spy.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing
* doReturn("foo").when(spy).get(0);
* </pre>
*
* 2. Watch out for final methods.
* Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble.
* What will happen is the real method will be called *on mock* but *not on the real instance* you passed to the spy() method.
* Typically you may get a NullPointerException because mock instances don't have fields initiated.
*
* <h3 id="14">14. Changing default return values of unstubbed invocations (Since 1.7) </h3>
*
* You can create a mock with specified strategy for its return values.
* It's quite advanced feature and typically you don't need it to write decent tests.
* However, it can be helpful for working with <b>legacy systems</b>.
* <p>
* It is the default answer so it will be used <b>only when you don't</b> stub the method call.
*
* <pre>
* Foo mock = mock(Foo.class, Mockito.RETURNS_SMART_NULLS);
* Foo mockTwo = mock(Foo.class, new YourOwnAnswer());
* </pre>
*
* <p>
* Read more about this interesting implementation of <i>Answer</i>: {@link Mockito#RETURNS_SMART_NULLS}
*
* <h3 id="15">15. Capturing arguments for further assertions (Since 1.8.0) </h3>
*
* Mockito verifies argument values in natural java style: by using an equals() method.
* This is also the recommended way of matching arguments because it makes tests clean & simple.
* In some situations though, it is helpful to assert on certain arguments after the actual verification.
* For example:
* <pre>
* ArgumentCaptor<Person> argument = ArgumentCaptor.forClass(Person.class);
* verify(mock).doSomething(argument.capture());
* assertEquals("John", argument.getValue().getName());
* </pre>
*
* <b>Warning:</b> it is recommended to use ArgumentCaptor with verification <b>but not</b> with stubbing.
* Using ArgumentCaptor with stubbing may decrease test readability because captor is created outside of assert (aka verify or 'then') block.
* Also it may reduce defect localization because if stubbed method was not called then no argument is captured.
* <p>
* In a way ArgumentCaptor is related to custom argument matchers (see javadoc for {@link ArgumentMatcher} class).
* Both techniques can be used for making sure certain arguments where passed to mocks.
* However, ArgumentCaptor may be a better fit if:
* <ul>
* <li>custom argument matcher is not likely to be reused</li>
* <li>you just need it to assert on argument values to complete verification</li>
* </ul>
* Custom argument matchers via {@link ArgumentMatcher} are usually better for stubbing.
*
* <h3 id="16">16. Real partial mocks (Since 1.8.0) </h3>
*
* Finally, after many internal debates & discussions on the mailing list, partial mock support was added to Mockito.
* Previously we considered partial mocks as code smells. However, we found a legitimate use case for partial mocks - more reading:
* <a href="http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring">here</a>
* <p>
* <b>Before release 1.8</b> spy() was not producing real partial mocks and it was confusing for some users.
* Read more about spying: <a href="#13">here</a> or in javadoc for {@link Mockito#spy(Object)} method.
* <p>
* <pre>
* //you can create partial mock with spy() method:
* List list = spy(new LinkedList());
*
* //you can enable partial mock capabilities selectively on mocks:
* Foo mock = mock(Foo.class);
* //Be sure the real implementation is 'safe'.
* //If real implementation throws exceptions or depends on specific state of the object then you're in trouble.
* when(mock.someMethod()).thenCallRealMethod();
* </pre>
*
* As usual you are going to read <b>the partial mock warning</b>:
 * Object oriented programming is more or less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
*
* <h3 id="17">17. Resetting mocks (Since 1.8.0) </h3>
*
* Smart Mockito users hardly use this feature because they know it could be a sign of poor tests.
* Normally, you don't need to reset your mocks, just create new mocks for each test method.
* <p>
* Instead of reset() please consider writing simple, small and focused test methods over lengthy, over-specified tests.
* <b>First potential code smell is reset() in the middle of the test method.</b> This probably means you're testing too much.
* Follow the whisper of your test methods: "Please keep us small & focused on single behavior".
* There are several threads about it on mockito mailing list.
* <p>
* The only reason we added reset() method is to
* make it possible to work with container-injected mocks.
* See issue 55 (<a href="http://code.google.com/p/mockito/issues/detail?id=55">here</a>)
* or FAQ (<a href="http://code.google.com/p/mockito/wiki/FAQ">here</a>).
* <p>
* <b>Don't harm yourself.</b> reset() in the middle of the test method is a code smell (you're probably testing too much).
* <pre>
* List mock = mock(List.class);
* when(mock.size()).thenReturn(10);
* mock.add(1);
*
* reset(mock);
* //at this point the mock forgot any interactions & stubbing
* </pre>
*
* <h3 id="18">18. Troubleshooting & validating framework usage (Since 1.8.0) </h3>
*
* First of all, in case of any trouble, I encourage you to read the Mockito FAQ:
* <a href="http://code.google.com/p/mockito/wiki/FAQ">http://code.google.com/p/mockito/wiki/FAQ</a>
* <p>
* In case of questions you may also post to mockito mailing list:
* <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a>
* <p>
* Next, you should know that Mockito validates if you use it correctly <b>all the time</b>.
* However, there's a gotcha so please read the javadoc for {@link Mockito#validateMockitoUsage()}
*
* <h3 id="19">19. Aliases for behavior driven development (Since 1.8.0) </h3>
*
* Behavior Driven Development style of writing tests uses <b>//given //when //then</b> comments as fundamental parts of your test methods.
* This is exactly how we write our tests and we warmly encourage you to do so!
* <p>
* Start learning about BDD here: <a href="http://en.wikipedia.org/wiki/Behavior_Driven_Development">http://en.wikipedia.org/wiki/Behavior_Driven_Development</a>
* <p>
* The problem is that current stubbing api with canonical role of <b>when</b> word does not integrate nicely with <b>//given //when //then</b> comments.
* It's because stubbing belongs to <b>given</b> component of the test and not to the <b>when</b> component of the test.
* Hence {@link BDDMockito} class introduces an alias so that you stub method calls with {@link BDDMockito#given(Object)} method.
* Now it really nicely integrates with the <b>given</b> component of a BDD style test!
* <p>
* Here is how the test might look like:
* <pre>
* import static org.mockito.BDDMockito.*;
*
* Seller seller = mock(Seller.class);
* Shop shop = new Shop(seller);
*
* public void shouldBuyBread() throws Exception {
* //given
* given(seller.askForBread()).willReturn(new Bread());
*
* //when
* Goods goods = shop.buyBread();
*
* //then
* assertThat(goods, containBread());
* }
* </pre>
*
* <h3 id="20">20. (**New**) Serializable mocks (Since 1.8.1) </h3>
*
* Mocks can be made serializable. With this feature you can use a mock in a place that requires dependencies to be serializable.
* <p>
* WARNING: This should be rarely used in unit testing.
* <p>
* The behaviour was implemented for a specific use case of a BDD spec that had an unreliable external dependency. This
* was in a web environment and the objects from the external dependency were being serialized to pass between layers.
* <p>
* To create serializable mock use {@link MockSettings#serializable()}:
* <pre>
* List serializableMock = mock(List.class, withSettings().serializable());
* </pre>
* <p>
* The mock can be serialized assuming all the normal <a href='http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html'>
* serialization requirements</a> are met by the class.
* <p>
* Making a real object spy serializable is a bit more effort as the spy(...) method does not have an overloaded version
* which accepts MockSettings. No worries, you will hardly ever use it.
*
* <pre>
* List<Object> list = new ArrayList<Object>();
* List<Object> spy = mock(ArrayList.class, withSettings()
* .spiedInstance(list)
* .defaultAnswer(CALLS_REAL_METHODS)
* .serializable());
* </pre>
*
* <h3 id="21">21. (**New**) New annotations: @Captor, @Spy, @InjectMocks (Since 1.8.3) </h3>
* <p>
* Release 1.8.3 brings new annotations that may be helpful on occasion:
*
* <ul>
* <li>@{@link Captor} simplifies creation of {@link ArgumentCaptor}
* - useful when the argument to capture is a nasty generic class and you want to avoid compiler warnings
* <li>@{@link Spy} - you can use it instead {@link Mockito#spy(Object)}.
* <li>@{@link InjectMocks} - injects mock or spy fields into tested object automatically.
* </ul>
*
* <p>
 * Note that @{@link InjectMocks} can also be used in combination with the @{@link Spy} annotation, which means
 * that Mockito will inject mocks into a partial mock under test. As a reminder, please read point 16 about partial mocks.
*
* <p>
* All new annotations are *only* processed on {@link MockitoAnnotations#initMocks(Object)}.
* As for @{@link Mock} annotation you can use the built-in runner: {@link MockitoJUnitRunner}.
* <p>
* <h3 id="22">22. (**New**) Verification with timeout (Since 1.8.5) </h3>
* <p>
* Allows verifying with timeout. May be useful for testing in concurrent conditions.
* <p>
* It feels this feature should be used rarely - figure out a better way of testing your multi-threaded system.
* <p>
* Not yet implemented to work with InOrder verification.
* <p>
* Examples:
* <p>
* <pre>
* //passes when someMethod() is called within given time span
* verify(mock, timeout(100)).someMethod();
* //above is an alias to:
* verify(mock, timeout(100).times(1)).someMethod();
*
* //passes when someMethod() is called *exactly* 2 times within given time span
* verify(mock, timeout(100).times(2)).someMethod();
*
 * //passes when someMethod() is called *at least* 2 times within given time span
* verify(mock, timeout(100).atLeast(2)).someMethod();
*
* //verifies someMethod() within given time span using given verification mode
* //useful only if you have your own custom verification modes.
* verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod();
* </pre>
*
 * <h3 id="23">23. (**New**) Automatic initialisation of @Spy, @InjectMocks fields (Since 1.8.6) </h3>
* <p>
 * Mockito will now try to initialise @{@link Spy} and @{@link InjectMocks} fields if and only if the type has
 * a zero-argument constructor, even a private one.
*
* <p>
* This is especially useful if you are testing an object with a Joshua Bloch Builder Pattern (see Effective Java Ed. 2008, §2.Item 2)
*
* <p>
* Example :
*
* <pre>
* // The type to test
* public class TooMuchComplicated {
* private List subItems;
* // other collaborators
*
* public void someBehaviourToTest() {}
*
* // no-argument constructor
* private TooMuchComplicated()
*
* private TooMuchComplicated(Builder builder) {}
*
* public static class Builder {
* public Builder withSubItems(Object subItems ...) {}
* // other builder methods
* public TooMuchComplicated build() { return new TooMuchComplicated(this); }
* }
* }
*
* // In your test
* @RunWith(MockitoJUnitRunner.class)
* public class TooMuchComplicatedTest {
* @Mock List subItems;
* // other mocked collaborators
* @InjectMocks TooMuchComplicated tested;
*
* // tests
* }
* </pre>
*
*/
@SuppressWarnings("unchecked")
public class Mockito extends Matchers {
    // Core implementation of the framework; every public static method in this
    // class delegates to this single shared instance.
    static final MockitoCore MOCKITO_CORE = new MockitoCore();
    /**
     * The default Answer of every mock <b>if</b> the mock was not stubbed.
     * Typically it just returns some empty value.
     * <p>
     * {@link Answer} can be used to define the return values of unstubbed invocations.
     * <p>
     * This implementation first tries the global configuration.
     * If there is no global configuration then it uses {@link ReturnsEmptyValues} (returns zeros, empty collections, nulls, etc.)
     */
    public static final Answer<Object> RETURNS_DEFAULTS = Answers.RETURNS_DEFAULTS.get();
    /**
     * Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
     * <p>
     * {@link Answer} can be used to define the return values of unstubbed invocations.
     * <p>
     * This implementation can be helpful when working with legacy code.
     * Unstubbed methods often return null. If your code uses the object returned by an unstubbed call you get a NullPointerException.
     * This implementation of Answer <b>returns SmartNull instead of null</b>.
     * SmartNull gives nicer exception message than NPE because it points out the line where unstubbed method was called. You just click on the stack trace.
     * <p>
     * ReturnsSmartNulls first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues})
     * then it tries to return SmartNull. If the return type is final then plain null is returned.
     * <p>
     * ReturnsSmartNulls will be probably the default return values strategy in Mockito 2.0
     * <p>
     * Example:
     * <pre>
     *   Foo mock = mock(Foo.class, RETURNS_SMART_NULLS);
     *
     *   //calling unstubbed method here:
     *   Stuff stuff = mock.getStuff();
     *
     *   //using object returned by unstubbed call:
     *   stuff.doSomething();
     *
     *   //Above doesn't yield NullPointerException this time!
     *   //Instead, SmartNullPointerException is thrown.
     *   //Exception's cause links to unstubbed <i>mock.getStuff()</i> - just click on the stack trace.
     * </pre>
     */
    public static final Answer<Object> RETURNS_SMART_NULLS = Answers.RETURNS_SMART_NULLS.get();
    /**
     * Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
     * <p>
     * {@link Answer} can be used to define the return values of unstubbed invocations.
     * <p>
     * This implementation can be helpful when working with legacy code.
     * <p>
     * ReturnsMocks first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues})
     * then it tries to return mocks. If the return type cannot be mocked (e.g. is final) then plain null is returned.
     */
    public static final Answer<Object> RETURNS_MOCKS = Answers.RETURNS_MOCKS.get();
    /**
     * Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
     * <p>
     * Example that shows how deep stub works:
     * <pre>
     *   Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS);
     *
     *   // note that we're stubbing a chain of methods here: getBar().getName()
     *   when(mock.getBar().getName()).thenReturn("deep");
     *
     *   // note that we're chaining method calls: getBar().getName()
     *   assertEquals("deep", mock.getBar().getName());
     * </pre>
     *
     * <strong>Verification API does not support 'chaining'</strong> so deep stub doesn't change how you do verification.
     * <p>
     * <strong>WARNING: </strong>
     * This feature should rarely be required for regular clean code! Leave it for legacy code.
     * Mocking a mock to return a mock, to return a mock, (...), to return something meaningful
     * hints at violation of Law of Demeter or mocking a value object (a well known anti-pattern).
     * <p>
     * Good quote I've seen one day on the web: <strong>every time a mock returns a mock a fairy dies</strong>.
     * <p>
     * How does deep stub work internally?
     * <pre>
     *   //this:
     *   Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS);
     *   when(mock.getBar().getName()).thenReturn("deep");
     *
     *   //is equivalent of
     *   Foo foo = mock(Foo.class);
     *   Bar bar = mock(Bar.class);
     *   when(foo.getBar()).thenReturn(bar);
     *   when(bar.getName()).thenReturn("deep");
     * </pre>
     * <p>
     * This feature will not work when any return type of methods included in the chain cannot be mocked
     * (for example: is a primitive or a final class). This is because of java type system.
     */
    public static final Answer<Object> RETURNS_DEEP_STUBS = Answers.RETURNS_DEEP_STUBS.get();
    /**
     * Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
     * <p>
     * {@link Answer} can be used to define the return values of unstubbed invocations.
     * <p>
     * This implementation can be helpful when working with legacy code.
     * When this implementation is used, unstubbed methods will delegate to the real implementation.
     * This is a way to create a partial mock object that calls real methods by default.
     * <p>
     * As usual you are going to read <b>the partial mock warning</b>:
     * Object oriented programming is more or less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
     * How does partial mock fit into this paradigm? Well, it just doesn't...
     * Partial mock usually means that the complexity has been moved to a different method on the same object.
     * In most cases, this is not the way you want to design your application.
     * <p>
     * However, there are rare cases when partial mocks come handy:
     * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
     * However, I wouldn't use partial mocks for new, test-driven & well-designed code.
     * <p>
     * Example:
     * <pre>
     * Foo mock = mock(Foo.class, CALLS_REAL_METHODS);
     *
     * // this calls the real implementation of Foo.getSomething()
     * value = mock.getSomething();
     *
     * when(mock.getSomething()).thenReturn(fakeValue);
     *
     * // now fakeValue is returned
     * value = mock.getSomething();
     * </pre>
     */
    public static final Answer<Object> CALLS_REAL_METHODS = Answers.CALLS_REAL_METHODS.get();
/**
* Creates mock object of given class or interface.
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param classToMock class or interface to mock
* @return mock object
*/
public static <T> T mock(Class<T> classToMock) {
return mock(classToMock, withSettings().defaultAnswer(RETURNS_DEFAULTS));
}
/**
* Specifies mock name. Naming mocks can be helpful for debugging - the name is used in all verification errors.
* <p>
* Beware that naming mocks is not a solution for complex code which uses too many mocks or collaborators.
* <b>If you have too many mocks then refactor the code</b> so that it's easy to test/debug without necessity of naming mocks.
* <p>
* <b>If you use @Mock annotation then you've got naming mocks for free!</b> @Mock uses field name as mock name. {@link Mock Read more.}
* <p>
*
* See examples in javadoc for {@link Mockito} class
*
* @param classToMock class or interface to mock
* @param name of the mock
* @return mock object
*/
public static <T> T mock(Class<T> classToMock, String name) {
return mock(classToMock, withSettings()
.name(name)
.defaultAnswer(RETURNS_DEFAULTS));
}
    /**
     * @deprecated
     * <b>Please use mock(Foo.class, defaultAnswer);</b>
     * <p>
     * See {@link Mockito#mock(Class, Answer)}
     * <p>
     * Why it is deprecated? ReturnValues is being replaced by Answer
     * for better consistency & interoperability of the framework.
     * Answer interface has been in Mockito for a while and it has the same responsibility as ReturnValues.
     * There's no point in maintaining exactly the same interfaces.
     * <p>
     * Creates mock with a specified strategy for its return values.
     * It's quite advanced feature and typically you don't need it to write decent tests.
     * However it can be helpful when working with legacy systems.
     * <p>
     * Obviously return values are used only when you don't stub the method call.
     *
     * <pre>
     *   Foo mock = mock(Foo.class, Mockito.RETURNS_SMART_NULLS);
     *   Foo mockTwo = mock(Foo.class, new YourOwnReturnValues());
     * </pre>
     *
     * <p>See examples in javadoc for {@link Mockito} class</p>
     *
     * @param classToMock class or interface to mock
     * @param returnValues default return values for unstubbed methods
     *
     * @return mock object
     */
    @Deprecated
    public static <T> T mock(Class<T> classToMock, ReturnValues returnValues) {
        return mock(classToMock, withSettings().defaultAnswer(new AnswerReturnValuesAdapter(returnValues)));
    }
/**
* Creates mock with a specified strategy for its answers to interactions.
* It's quite advanced feature and typically you don't need it to write decent tests.
* However it can be helpful when working with legacy systems.
* <p>
* It is the default answer so it will be used <b>only when you don't</b> stub the method call.
*
* <pre>
* Foo mock = mock(Foo.class, RETURNS_SMART_NULLS);
* Foo mockTwo = mock(Foo.class, new YourOwnAnswer());
* </pre>
*
* <p>See examples in javadoc for {@link Mockito} class</p>
*
* @param classToMock class or interface to mock
* @param defaultAnswer default answer for unstubbed methods
*
* @return mock object
*/
public static <T> T mock(Class<T> classToMock, Answer defaultAnswer) {
return mock(classToMock, withSettings().defaultAnswer(defaultAnswer));
}
    /**
     * Creates a mock with some non-standard settings.
     * <p>
     * The number of configuration points for a mock grows
     * so we need a fluent way to introduce new configuration without adding more and more overloaded Mockito.mock() methods.
     * Hence {@link MockSettings}.
     * <pre>
     *   Listener mock = mock(Listener.class, withSettings()
     *     .name("firstListener").defaultAnswer(RETURNS_SMART_NULLS));
     * </pre>
     * <b>Use it carefully and occasionally</b>. What might be reason your test needs non-standard mocks?
     * Is the code under test so complicated that it requires non-standard mocks?
     * Wouldn't you prefer to refactor the code under test so it is testable in a simple way?
     * <p>
     * See also {@link Mockito#withSettings()}
     * <p>
     * See examples in javadoc for {@link Mockito} class
     *
     * @param classToMock class or interface to mock
     * @param mockSettings additional mock settings
     * @return mock object
     */
    public static <T> T mock(Class<T> classToMock, MockSettings mockSettings) {
        return MOCKITO_CORE.mock(classToMock, mockSettings);
    }
/**
* Creates a spy of the real object. The spy calls <b>real</b> methods unless they are stubbed.
* <p>
* Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code.
* <p>
* As usual you are going to read <b>the partial mock warning</b>:
* Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
* <p>
* Example:
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //optionally, you can stub out some methods:
* when(spy.size()).thenReturn(100);
*
* //using the spy calls <b>real</b> methods
* spy.add("one");
* spy.add("two");
*
* //prints "one" - the first element of a list
* System.out.println(spy.get(0));
*
* //size() method was stubbed - 100 is printed
* System.out.println(spy.size());
*
* //optionally, you can verify
* verify(spy).add("one");
* verify(spy).add("two");
* </pre>
*
* <h4>Important gotcha on spying real objects!</h4>
*
* 1. Sometimes it's impossible to use {@link Mockito#when(Object)} for stubbing spies. Example:
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(spy.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing
* doReturn("foo").when(spy).get(0);
* </pre>
*
* 2. Watch out for final methods.
* Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble.
* What will happen is the real method will be called *on mock* but *not on the real instance* you passed to the spy() method.
* Typically you may get a NullPointerException because mock instances don't have fields initiated.
*
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param object
* to spy on
* @return a spy of the real object
*/
public static <T> T spy(T object) {
return MOCKITO_CORE.mock((Class<T>) object.getClass(), withSettings()
.spiedInstance(object)
.defaultAnswer(CALLS_REAL_METHODS));
}
    /**
     * Stubs a method call with return value or an exception. E.g:
     *
     * <pre>
     * stub(mock.someMethod()).toReturn(10);
     *
     * //you can use flexible argument matchers, e.g:
     * stub(mock.someMethod(<b>anyString()</b>)).toReturn(10);
     *
     * //setting exception to be thrown:
     * stub(mock.someMethod("some arg")).toThrow(new RuntimeException());
     *
     * //you can stub with different behavior for consecutive method calls.
     * //Last stubbing (e.g: toReturn("foo")) determines the behavior for further consecutive calls.
     * stub(mock.someMethod("some arg"))
     *  .toThrow(new RuntimeException())
     *  .toReturn("foo");
     * </pre>
     * <p>
     * Some users find stub() confusing therefore {@link Mockito#when(Object)} is recommended over stub()
     * <pre>
     *   //Instead of:
     *   stub(mock.count()).toReturn(10);
     *
     *   //You can do:
     *   when(mock.count()).thenReturn(10);
     * </pre>
     * For stubbing void methods with throwables see: {@link Mockito#doThrow(Throwable)}
     * <p>
     * Stubbing can be overridden: for example common stubbing can go to fixture
     * setup but the test methods can override it.
     * Please note that overriding stubbing is a potential code smell that points out too much stubbing.
     * <p>
     * Once stubbed, the method will always return stubbed value regardless
     * of how many times it is called.
     * <p>
     * Last stubbing is more important - when you stubbed the same method with
     * the same arguments many times.
     * <p>
     * Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
     * Let's say you've stubbed foo.bar().
     * If your code cares what foo.bar() returns then something else breaks(often before even verify() gets executed).
     * If your code doesn't care what get(0) returns then it should not be stubbed.
     * Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
     *
     * @param methodCall
     *            method call
     * @return DeprecatedOngoingStubbing object to set stubbed value/exception
     */
    public static <T> DeprecatedOngoingStubbing<T> stub(T methodCall) {
        return MOCKITO_CORE.stub(methodCall);
    }
    /**
     * Enables stubbing methods. Use it when you want the mock to return particular value when particular method is called.
     * <p>
     * Simply put: "<b>When</b> the x method is called <b>then</b> return y".
     * <p>
     * <b>when() is a successor of deprecated {@link Mockito#stub(Object)}</b>
     * <p>
     * Examples:
     *
     * <pre>
     * <b>when</b>(mock.someMethod()).<b>thenReturn</b>(10);
     *
     * //you can use flexible argument matchers, e.g:
     * when(mock.someMethod(<b>anyString()</b>)).thenReturn(10);
     *
     * //setting exception to be thrown:
     * when(mock.someMethod("some arg")).thenThrow(new RuntimeException());
     *
     * //you can set different behavior for consecutive method calls.
     * //Last stubbing (e.g: thenReturn("foo")) determines the behavior of further consecutive calls.
     * when(mock.someMethod("some arg"))
     *  .thenThrow(new RuntimeException())
     *  .thenReturn("foo");
     *
     * //Alternative, shorter version for consecutive stubbing:
     * when(mock.someMethod("some arg"))
     *  .thenReturn("one", "two");
     * //is the same as:
     * when(mock.someMethod("some arg"))
     *  .thenReturn("one")
     *  .thenReturn("two");
     *
     * //shorter version for consecutive method calls throwing exceptions:
     * when(mock.someMethod("some arg"))
     *  .thenThrow(new RuntimeException(), new NullPointerException());
     *
     * </pre>
     *
     * For stubbing void methods with throwables see: {@link Mockito#doThrow(Throwable)}
     * <p>
     * Stubbing can be overridden: for example common stubbing can go to fixture
     * setup but the test methods can override it.
     * Please note that overriding stubbing is a potential code smell that points out too much stubbing.
     * <p>
     * Once stubbed, the method will always return stubbed value regardless
     * of how many times it is called.
     * <p>
     * Last stubbing is more important - when you stubbed the same method with
     * the same arguments many times.
     * <p>
     * Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
     * Let's say you've stubbed foo.bar().
     * If your code cares what foo.bar() returns then something else breaks(often before even verify() gets executed).
     * If your code doesn't care what get(0) returns then it should not be stubbed.
     * Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
     *
     * <p>
     * See examples in javadoc for {@link Mockito} class
     * @param methodCall method to be stubbed
     * @return OngoingStubbing object used to stub the call fluently
     */
    public static <T> OngoingStubbing<T> when(T methodCall) {
        return MOCKITO_CORE.when(methodCall);
    }
/**
* Verifies certain behavior <b>happened once</b>
* <p>
* Alias to <code>verify(mock, times(1))</code> E.g:
* <pre>
* verify(mock).someMethod("some arg");
* </pre>
* Above is equivalent to:
* <pre>
* verify(mock, times(1)).someMethod("some arg");
* </pre>
* <p>
* Arguments passed are compared using equals() method.
* Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed.
* <p>
* Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
* Let's say you've stubbed foo.bar().
* If your code cares what foo.bar() returns then something else breaks(often before even verify() gets executed).
* If your code doesn't care what get(0) returns then it should not be stubbed.
* Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
*
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param mock to be verified
* @return mock object itself
*/
public static <T> T verify(T mock) {
return MOCKITO_CORE.verify(mock, times(1));
}
    /**
     * Verifies certain behavior happened at least once / exact number of times / never. E.g:
     * <pre>
     *   verify(mock, times(5)).someMethod("was called five times");
     *
     *   verify(mock, atLeast(2)).someMethod("was called at least two times");
     *
     *   //you can use flexible argument matchers, e.g:
     *   verify(mock, atLeastOnce()).someMethod(<b>anyString()</b>);
     * </pre>
     *
     * <b>times(1) is the default</b> and can be omitted
     * <p>
     * Arguments passed are compared using equals() method.
     * Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed.
     * <p>
     *
     * @param mock to be verified
     * @param mode times(x), atLeastOnce(), never(), or any other {@link VerificationMode} such as timeout()
     *
     * @return mock object itself
     */
    public static <T> T verify(T mock, VerificationMode mode) {
        return MOCKITO_CORE.verify(mock, mode);
    }
/**
* Smart Mockito users hardly use this feature because they know it could be a sign of poor tests.
* Normally, you don't need to reset your mocks, just create new mocks for each test method.
* <p>
* Instead of reset() please consider writing simple, small and focused test methods over lengthy, over-specified tests.
* <b>First potential code smell is reset() in the middle of the test method.</b> This probably means you're testing too much.
* Follow the whisper of your test methods: "Please keep us small & focused on single behavior".
* There are several threads about it on mockito mailing list.
* <p>
* The only reason we added reset() method is to
* make it possible to work with container-injected mocks.
* See issue 55 (<a href="http://code.google.com/p/mockito/issues/detail?id=55">here</a>)
* or FAQ (<a href="http://code.google.com/p/mockito/wiki/FAQ">here</a>).
* <p>
* <b>Don't harm yourself.</b> reset() in the middle of the test method is a code smell (you're probably testing too much).
* <pre>
* List mock = mock(List.class);
* when(mock.size()).thenReturn(10);
* mock.add(1);
*
* reset(mock);
* //at this point the mock forgot any interactions & stubbing
* </pre>
*
* @param <T>
* @param mocks to be reset
*/
    public static <T> void reset(T ... mocks) {
        // Drops all recorded interactions and stubbings on every given mock,
        // returning each to its freshly-created state.
        MOCKITO_CORE.reset(mocks);
    }
/**
* Checks if any of given mocks has any unverified interaction.
* <p>
* You can use this method after you verified your mocks - to make sure that nothing
* else was invoked on your mocks.
* <p>
* See also {@link Mockito#never()} - it is more explicit and communicates the intent well.
* <p>
* Stubbed invocations (if called) are also treated as interactions.
* <p>
* A word of <b>warning</b>:
* Some users who did a lot of classic, expect-run-verify mocking tend to use verifyNoMoreInteractions() very often, even in every test method.
* verifyNoMoreInteractions() is not recommended to use in every test method.
* verifyNoMoreInteractions() is a handy assertion from the interaction testing toolkit. Use it only when it's relevant.
* Abusing it leads to overspecified, less maintainable tests. You can find further reading
* <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>.
* <p>
* This method will also detect unverified invocations that occurred before the test method,
* for example: in setUp(), @Before method or in constructor.
* Consider writing nice code that makes interactions only in test methods.
*
* <p>
* Example:
*
* <pre>
* //interactions
* mock.doSomething();
* mock.doSomethingUnexpected();
*
* //verification
* verify(mock).doSomething();
*
* //following will fail because 'doSomethingUnexpected()' is unexpected
* verifyNoMoreInteractions(mock);
*
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param mocks to be verified
*/
    public static void verifyNoMoreInteractions(Object... mocks) {
        // Fails if any of the given mocks has an interaction that was not
        // covered by an earlier verify(...) call.
        MOCKITO_CORE.verifyNoMoreInteractions(mocks);
    }
/**
* Verifies that no interactions happened on given mocks.
* <pre>
* verifyZeroInteractions(mockOne, mockTwo);
* </pre>
* This method will also detect invocations
* that occurred before the test method, for example: in setUp(), @Before method or in constructor.
* Consider writing nice code that makes interactions only in test methods.
* <p>
* See also {@link Mockito#never()} - it is more explicit and communicates the intent well.
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param mocks to be verified
*/
    public static void verifyZeroInteractions(Object... mocks) {
        // Alias: with no prior verify(...) calls on these mocks, "no more
        // interactions" is equivalent to "zero interactions".
        MOCKITO_CORE.verifyNoMoreInteractions(mocks);
    }
/**
* <pre>
* //Instead of:
* stubVoid(mock).toThrow(e).on().someVoidMethod();
*
* //Please do:
* doThrow(e).when(mock).someVoidMethod();
* </pre>
*
* doThrow() replaces stubVoid() because of improved readability and consistency with the family of doAnswer() methods.
* <p>
* Originally, stubVoid() was used for stubbing void methods with exceptions. E.g:
*
* <pre>
* stubVoid(mock).toThrow(new RuntimeException()).on().someMethod();
*
* //you can stub with different behavior for consecutive calls.
* //Last stubbing (e.g. toReturn()) determines the behavior for further consecutive calls.
* stubVoid(mock)
* .toThrow(new RuntimeException())
* .toReturn()
* .on().someMethod();
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @deprecated Use {@link Mockito#doThrow(Throwable)} method for stubbing voids
*
* @param mock
* to stub
* @return stubbable object that allows stubbing with throwable
*/
public static <T> VoidMethodStubbable<T> stubVoid(T mock) {
return MOCKITO_CORE.stubVoid(mock);
}
/**
* Use doThrow() when you want to stub the void method with an exception.
* <p>
* Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets...
* <p>
* Example:
*
* <pre>
* doThrow(new RuntimeException()).when(mock).someVoidMethod();
* </pre>
*
* @param toBeThrown to be thrown when the stubbed method is called
* @return stubber - to select a method for stubbing
*/
public static Stubber doThrow(Throwable toBeThrown) {
return MOCKITO_CORE.doAnswer(new ThrowsException(toBeThrown));
}
/**
* Use doCallRealMethod() when you want to call the real implementation of a method.
* <p>
* As usual you are going to read <b>the partial mock warning</b>:
* Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
* <p>
* See also javadoc {@link Mockito#spy(Object)} to find out more about partial mocks.
* <b>Mockito.spy() is a recommended way of creating partial mocks.</b>
* The reason is it guarantees real methods are called against correctly constructed object because you're responsible for constructing the object passed to spy() method.
* <p>
* Example:
* <pre>
* Foo mock = mock(Foo.class);
* doCallRealMethod().when(mock).someVoidMethod();
*
* // this will call the real implementation of Foo.someVoidMethod()
* mock.someVoidMethod();
* </pre>
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return stubber - to select a method for stubbing
*/
public static Stubber doCallRealMethod() {
return MOCKITO_CORE.doAnswer(new CallsRealMethods());
}
/**
* Use doAnswer() when you want to stub a void method with generic {@link Answer}.
* <p>
* Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets...
* <p>
* Example:
*
* <pre>
* doAnswer(new Answer() {
* public Object answer(InvocationOnMock invocation) {
* Object[] args = invocation.getArguments();
* Mock mock = invocation.getMock();
* return null;
* }})
* .when(mock).someMethod();
* </pre>
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param answer to answer when the stubbed method is called
* @return stubber - to select a method for stubbing
*/
public static Stubber doAnswer(Answer answer) {
return MOCKITO_CORE.doAnswer(answer);
}
/**
* Use doNothing() for setting void methods to do nothing. <b>Beware that void methods on mocks do nothing by default!</b>
* However, there are rare situations when doNothing() comes handy:
* <p>
* 1. Stubbing consecutive calls on a void method:
* <pre>
* doNothing().
* doThrow(new RuntimeException())
* .when(mock).someVoidMethod();
*
* //does nothing the first time:
* mock.someVoidMethod();
*
* //throws RuntimeException the next time:
* mock.someVoidMethod();
* </pre>
*
* 2. When you spy real objects and you want the void method to do nothing:
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //let's make clear() do nothing
* doNothing().when(spy).clear();
*
* spy.add("one");
*
* //clear() does nothing, so the list still contains "one"
* spy.clear();
* </pre>
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return stubber - to select a method for stubbing
*/
public static Stubber doNothing() {
return MOCKITO_CORE.doAnswer(new DoesNothing());
}
/**
* Use doReturn() in those rare occasions when you cannot use {@link Mockito#when(Object)}.
* <p>
* <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe
* and more readable</b> (especially when stubbing consecutive calls).
* <p>
* Here are those rare occasions when doReturn() comes handy:
* <p>
*
* 1. When spying real objects and calling real methods on a spy brings side effects
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(spy.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing:
* doReturn("foo").when(spy).get(0);
* </pre>
*
* 2. Overriding a previous exception-stubbing:
*
* <pre>
* when(mock.foo()).thenThrow(new RuntimeException());
*
* //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown.
* when(mock.foo()).thenReturn("bar");
*
* //You have to use doReturn() for stubbing:
* doReturn("bar").when(mock).foo();
* </pre>
*
* Above scenarios shows a tradeoff of Mockito's ellegant syntax. Note that the scenarios are very rare, though.
* Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general
* overridding stubbing is a potential code smell that points out too much stubbing.
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param toBeReturned to be returned when the stubbed method is called
* @return stubber - to select a method for stubbing
*/
public static Stubber doReturn(Object toBeReturned) {
return MOCKITO_CORE.doAnswer(new Returns(toBeReturned));
}
/**
* Creates InOrder object that allows verifying mocks in order.
*
* <pre>
* InOrder inOrder = inOrder(firstMock, secondMock);
*
* inOrder.verify(firstMock).add("was called first");
* inOrder.verify(secondMock).add("was called second");
* </pre>
*
* Verification in order is flexible - <b>you don't have to verify all interactions</b> one-by-one
* but only those that you are interested in testing in order.
* <p>
* Also, you can create InOrder object passing only mocks that are relevant for in-order verification.
* <p>
* InOrder verification is 'greedy'. You will hardly every notice it but
* if you want to find out more search for 'greedy' on the Mockito
* <a href="http://code.google.com/p/mockito/w/list">wiki pages</a>.
* <p>
* As of Mockito 1.8.4 you can verifyNoMoreInvocations() in order-sensitive way. Read more: {@link InOrder#verifyNoMoreInteractions()}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param mocks to be verified in order
*
* @return InOrder object to be used to verify in order
*/
public static InOrder inOrder(Object... mocks) {
return MOCKITO_CORE.inOrder(mocks);
}
/**
* Allows verifying exact number of invocations. E.g:
* <pre>
* verify(mock, times(2)).someMethod("some arg");
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param wantedNumberOfInvocations wanted number of invocations
*
* @return verification mode
*/
public static VerificationMode times(int wantedNumberOfInvocations) {
return VerificationModeFactory.times(wantedNumberOfInvocations);
}
/**
* Alias to times(0), see {@link Mockito#times(int)}
* <p>
* Verifies that interaction did not happen. E.g:
* <pre>
* verify(mock, never()).someMethod();
* </pre>
*
* <p>
* If you want to verify there were NO interactions with the mock
* check out {@link Mockito#verifyZeroInteractions(Object...)}
* or {@link Mockito#verifyNoMoreInteractions(Object...)}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return verification mode
*/
public static VerificationMode never() {
return times(0);
}
/**
* Allows at-least-once verification. E.g:
* <pre>
* verify(mock, atLeastOnce()).someMethod("some arg");
* </pre>
* Alias to atLeast(1)
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return verification mode
*/
public static VerificationMode atLeastOnce() {
return VerificationModeFactory.atLeastOnce();
}
/**
* Allows at-least-x verification. E.g:
* <pre>
* verify(mock, atLeast(3)).someMethod("some arg");
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param minNumberOfInvocations minimum number of invocations
*
* @return verification mode
*/
public static VerificationMode atLeast(int minNumberOfInvocations) {
return VerificationModeFactory.atLeast(minNumberOfInvocations);
}
/**
* Allows at-most-x verification. E.g:
* <pre>
* verify(mock, atMost(3)).someMethod("some arg");
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param maxNumberOfInvocations max number of invocations
*
* @return verification mode
*/
public static VerificationMode atMost(int maxNumberOfInvocations) {
return VerificationModeFactory.atMost(maxNumberOfInvocations);
}
/**
* Allows checking if given method was the only one invoked. E.g:
* <pre>
* verify(mock, only()).someMethod();
* //above is a shorthand for following 2 lines of code:
* verify(mock).someMethod();
* verifyNoMoreInvocations(mock);
* </pre>
*
* <p>
* See also {@link Mockito#verifyNoMoreInteractions(Object...)}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return verification mode
*/
    // Mode that passes only when the verified call is the mock's sole interaction.
    //TODO make exception message nicer
    public static VerificationMode only() {
        return VerificationModeFactory.only();
    }
/**
* Allows verifying with timeout. May be useful for testing in concurrent conditions.
* <p>
* It feels this feature should be used rarely - figure out a better way of testing your multi-threaded system
* <p>
* Not yet implemented to work with InOrder verification.
* <pre>
* //passes when someMethod() is called within given time span
* verify(mock, timeout(100)).someMethod();
* //above is an alias to:
* verify(mock, timeout(100).times(1)).someMethod();
*
* //passes when someMethod() is called *exactly* 2 times within given time span
* verify(mock, timeout(100).times(2)).someMethod();
*
* //passes when someMethod() is called *at lest* 2 times within given time span
* verify(mock, timeout(100).atLeast(2)).someMethod();
*
* //verifies someMethod() within given time span using given verification mode
* //useful only if you have your own custom verification modes.
* verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod();
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param millis - time span in millis
*
* @return verification mode
*/
public static VerificationWithTimeout timeout(int millis) {
return new Timeout(millis, VerificationModeFactory.times(1));
}
/**
* First of all, in case of any trouble, I encourage you to read the Mockito FAQ: <a href="http://code.google.com/p/mockito/wiki/FAQ">http://code.google.com/p/mockito/wiki/FAQ</a>
* <p>
* In case of questions you may also post to mockito mailing list: <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a>
* <p>
* validateMockitoUsage() <b>explicitly validates</b> the framework state to detect invalid use of Mockito.
* However, this feature is optional <b>because Mockito validates the usage all the time...</b> but there is a gotcha so read on.
* <p>
* Examples of incorrect use:
* <pre>
* //Oups, someone forgot thenReturn() part:
* when(mock.get());
*
* //Oups, someone put the verified method call inside verify() where it should be outside:
* verify(mock.execute());
*
* //Oups, someone has used EasyMock for too long and forgot to specify the method to verify:
* verify(mock);
* </pre>
*
* Mockito throws exceptions if you misuse it so that you know if your tests are written correctly.
* The gotcha is that Mockito does the validation <b>next time</b> you use the framework (e.g. next time you verify, stub, call mock etc.).
* But even though the exception might be thrown in the next test,
* the exception <b>message contains a navigable stack trace element</b> with location of the defect.
* Hence you can click and find the place where Mockito was misused.
* <p>
* Sometimes though, you might want to validate the framework usage explicitly.
* For example, one of the users wanted to put validateMockitoUsage() in his @After method
* so that he knows immediately when he misused Mockito.
* Without it, he would have known about it not sooner than <b>next time</b> he used the framework.
* One more benefit of having validateMockitoUsage() in @After is that jUnit runner will always fail in the test method with defect
* whereas ordinary 'next-time' validation might fail the <b>next</b> test method.
* But even though JUnit might report next test as red, don't worry about it
* and just click at navigable stack trace element in the exception message to instantly locate the place where you misused mockito.
* <p>
* <b>Built-in runner: {@link MockitoJUnitRunner}</b> does validateMockitoUsage() after each test method.
* <p>
* Bear in mind that <b>usually you don't have to validateMockitoUsage()</b>
* and framework validation triggered on next-time basis should be just enough,
* mainly because of enhanced exception message with clickable location of defect.
* However, I would recommend validateMockitoUsage() if you already have sufficient test infrastructure
* (like your own runner or base class for all tests) because adding a special action to @After has zero cost.
* <p>
* See examples in javadoc for {@link Mockito} class
*/
    public static void validateMockitoUsage() {
        // Explicitly triggers the framework-state validation that Mockito
        // would otherwise perform lazily on the next framework call.
        MOCKITO_CORE.validateMockitoUsage();
    }
/**
* Allows mock creation with additional mock settings.
* <p>
* Don't use it too often.
* Consider writing simple tests that use simple mocks.
* Repeat after me: simple tests push simple, KISSy, readable & maintainable code.
* If you cannot write a test in a simple way - refactor the code under test.
* <p>
* Examples of mock settings:
* <pre>
* //Creates mock with different default answer & name
* Foo mock = mock(Foo.class, withSettings()
* .defaultAnswer(RETURNS_SMART_NULLS)
* .name("cool mockie"));
*
* //Creates mock with different default answer, descriptive name and extra interfaces
* Foo mock = mock(Foo.class, withSettings()
* .defaultAnswer(RETURNS_SMART_NULLS)
* .name("cool mockie")
* .extraInterfaces(Bar.class));
* </pre>
* {@link MockSettings} has been introduced for two reasons.
* Firstly, to make it easy to add another mock settings when the demand comes.
* Secondly, to enable combining different mock settings without introducing zillions of overloaded mock() methods.
* <p>
* See javadoc for {@link MockSettings} to learn about possible mock settings.
* <p>
*
* @return mock settings instance with defaults.
*/
public static MockSettings withSettings() {
return new MockSettingsImpl().defaultAnswer(RETURNS_DEFAULTS);
}
/*
* Helps debugging failing tests. Experimental - use at your own risk.
*/
@Deprecated
static MockitoDebugger debug() {
return new MockitoDebuggerImpl();
}
}
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito;
import org.mockito.internal.MockitoCore;
import org.mockito.internal.creation.MockSettingsImpl;
import org.mockito.internal.debugging.MockitoDebuggerImpl;
import org.mockito.internal.stubbing.answers.AnswerReturnValuesAdapter;
import org.mockito.internal.stubbing.answers.CallsRealMethods;
import org.mockito.internal.stubbing.answers.DoesNothing;
import org.mockito.internal.stubbing.answers.Returns;
import org.mockito.internal.stubbing.answers.ThrowsException;
import org.mockito.internal.stubbing.defaultanswers.ReturnsEmptyValues;
import org.mockito.internal.stubbing.defaultanswers.ReturnsMoreEmptyValues;
import org.mockito.internal.verification.VerificationModeFactory;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.DeprecatedOngoingStubbing;
import org.mockito.stubbing.OngoingStubbing;
import org.mockito.stubbing.Stubber;
import org.mockito.stubbing.VoidMethodStubbable;
import org.mockito.verification.VerificationWithTimeout;
import org.mockito.verification.Timeout;
import org.mockito.verification.VerificationMode;
/**
* <p align="left"><img src="logo.jpg"/></p>
* Mockito library enables mocks creation, verification and stubbing.
* <p>
* This javadoc content is also available on the <a href="http://mockito.org">http://mockito.org</a> web page.
* All documentation is kept in javadocs because it guarantees consistency between what's on the web and what's in the source code.
* Also, it makes possible to access documentation straight from the IDE even if you work offline.
*
* <h1>Contents</h1>
*
* <b>
* <a href="#1">1. Let's verify some behaviour! </a><br/>
* <a href="#2">2. How about some stubbing? </a><br/>
* <a href="#3">3. Argument matchers </a><br/>
* <a href="#4">4. Verifying exact number of invocations / at least once / never </a><br/>
* <a href="#5">5. Stubbing void methods with exceptions </a><br/>
* <a href="#6">6. Verification in order </a><br/>
* <a href="#7">7. Making sure interaction(s) never happened on mock </a><br/>
* <a href="#8">8. Finding redundant invocations </a><br/>
* <a href="#9">9. Shorthand for mocks creation - @Mock annotation </a><br/>
* <a href="#10">10. Stubbing consecutive calls (iterator-style stubbing) </a><br/>
* <a href="#11">11. Stubbing with callbacks </a><br/>
* <a href="#12">12. doThrow()|doAnswer()|doNothing()|doReturn() family of methods mostly for stubbing voids </a><br/>
* <a href="#13">13. Spying on real objects </a><br/>
* <a href="#14">14. Changing default return values of unstubbed invocations (Since 1.7) </a><br/>
* <a href="#15">15. Capturing arguments for further assertions (Since 1.8.0) </a><br/>
* <a href="#16">16. Real partial mocks (Since 1.8.0) </a><br/>
* <a href="#17">17. Resetting mocks (Since 1.8.0) </a><br/>
* <a href="#18">18. Troubleshooting & validating framework usage (Since 1.8.0) </a><br/>
* <a href="#19">19. Aliases for behavior driven development (Since 1.8.0) </a><br/>
* <a href="#20">20. Serializable mocks (Since 1.8.1) </a><br/>
* <a href="#21">21. New annotations: @Captor, @Spy, @InjectMocks (Since 1.8.3) </a><br/>
* <a href="#22">22. (**New**) Verification with timeout (Since 1.8.5) </a><br/>
* </b>
*
* <p>
* Following examples mock a List, because everyone knows its interface (methods
* like add(), get(), clear() will be used). <br>
* You probably wouldn't mock List class 'in real'.
*
* <h3 id="1">1. Let's verify some behaviour!</h3>
*
* <pre>
* //Let's import Mockito statically so that the code looks clearer
* import static org.mockito.Mockito.*;
*
* //mock creation
* List mockedList = mock(List.class);
*
* //using mock object
* mockedList.add("one");
* mockedList.clear();
*
* //verification
* verify(mockedList).add("one");
* verify(mockedList).clear();
* </pre>
*
* <p>
* Once created, mock will remember all interactions. Then you can selectively
* verify whatever interaction you are interested in.
*
* <h3 id="2">2. How about some stubbing?</h3>
*
* <pre>
* //You can mock concrete classes, not only interfaces
* LinkedList mockedList = mock(LinkedList.class);
*
* //stubbing
* when(mockedList.get(0)).thenReturn("first");
* when(mockedList.get(1)).thenThrow(new RuntimeException());
*
* //following prints "first"
* System.out.println(mockedList.get(0));
*
* //following throws runtime exception
* System.out.println(mockedList.get(1));
*
* //following prints "null" because get(999) was not stubbed
* System.out.println(mockedList.get(999));
*
* //Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>
* //If your code cares what get(0) returns then something else breaks (often before even verify() gets executed).
* //If your code doesn't care what get(0) returns then it should not be stubbed. Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
* verify(mockedList).get(0);
* </pre>
*
* <ul>
* <li> By default, for all methods that return value, mock returns null, an
* empty collection or appropriate primitive/primitive wrapper value (e.g: 0,
* false, ... for int/Integer, boolean/Boolean, ...). </li>
*
* <li> Stubbing can be overridden: for example common stubbing can go to
* fixture setup but the test methods can override it.
* Please note that overridding stubbing is a potential code smell that points out too much stubbing</li>
*
* <li> Once stubbed, the method will always return stubbed value regardless
* of how many times it is called. </li>
*
* <li> Last stubbing is more important - when you stubbed the same method with
* the same arguments many times. </li>
*
* </ul>
*
* <h3 id="3">3. Argument matchers</h3>
*
* Mockito verifies argument values in natural java style: by using an equals() method.
* Sometimes, when extra flexibility is required then you might use argument matchers:
*
* <pre>
* //stubbing using built-in anyInt() argument matcher
* when(mockedList.get(anyInt())).thenReturn("element");
*
* //stubbing using hamcrest (let's say isValid() returns your own hamcrest matcher):
* when(mockedList.contains(argThat(isValid()))).thenReturn("element");
*
* //following prints "element"
* System.out.println(mockedList.get(999));
*
* //<b>you can also verify using an argument matcher</b>
* verify(mockedList).get(anyInt());
* </pre>
*
* <p>
* Argument matchers allow flexible verification or stubbing.
* {@link Matchers Click here to see} more built-in matchers
* and examples of <b>custom argument matchers / hamcrest matchers</b>.
* <p>
* For information solely on <b>custom argument matchers</b> check out javadoc for {@link ArgumentMatcher} class.
* <p>
* Be reasonable with using complicated argument matching.
* The natural matching style using equals() with occasional anyX() matchers tend to give clean & simple tests.
* Sometimes it's just better to refactor the code to allow equals() matching or even implement equals() method to help out with testing.
* <p>
* Also, read <a href="#15">section 15</a> or javadoc for {@link ArgumentCaptor} class.
* {@link ArgumentCaptor} is a special implementation of an argument matcher that captures argument values for further assertions.
* <p>
* <b>Warning on argument matchers:</b>
* <p>
* If you are using argument matchers, <b>all arguments</b> have to be provided
* by matchers.
* <p>
* E.g: (example shows verification but the same applies to stubbing):
*
* <pre>
* verify(mock).someMethod(anyInt(), anyString(), <b>eq("third argument")</b>);
* //above is correct - eq() is also an argument matcher
*
* verify(mock).someMethod(anyInt(), anyString(), <b>"third argument"</b>);
* //above is incorrect - exception will be thrown because third argument is given without an argument matcher.
* </pre>
*
* <p>
* Matcher methods like anyObject(), eq() <b>do not</b> return matchers.
* Internally, they record a matcher on a stack and return a dummy value (usually null).
* This implementation is due static type safety imposed by java compiler.
* The consequence is that you cannot use anyObject(), eq() methods outside of verified/stubbed method.
*
* <h3 id="4">4. Verifying exact number of invocations / at least x / never</h3>
*
* <pre>
* //using mock
* mockedList.add("once");
*
* mockedList.add("twice");
* mockedList.add("twice");
*
* mockedList.add("three times");
* mockedList.add("three times");
* mockedList.add("three times");
*
* //following two verifications work exactly the same - times(1) is used by default
* verify(mockedList).add("once");
* verify(mockedList, times(1)).add("once");
*
* //exact number of invocations verification
* verify(mockedList, times(2)).add("twice");
* verify(mockedList, times(3)).add("three times");
*
* //verification using never(). never() is an alias to times(0)
* verify(mockedList, never()).add("never happened");
*
* //verification using atLeast()/atMost()
* verify(mockedList, atLeastOnce()).add("three times");
* verify(mockedList, atLeast(2)).add("five times");
* verify(mockedList, atMost(5)).add("three times");
*
* </pre>
*
* <p>
* <b>times(1) is the default.</b> Therefore using times(1) explicitly can be
* omitted.
*
* <h3 id="5">5. Stubbing void methods with exceptions</h3>
*
* <pre>
* doThrow(new RuntimeException()).when(mockedList).clear();
*
* //following throws RuntimeException:
* mockedList.clear();
* </pre>
*
* Read more about doThrow|doAnswer family of methods in paragraph 12.
* <p>
* Initially, {@link Mockito#stubVoid(Object)} was used for stubbing voids.
* Currently stubVoid() is deprecated in favor of {@link Mockito#doThrow(Throwable)}.
* This is because of improved readability and consistency with the family of {@link Mockito#doAnswer(Answer)} methods.
*
* <h3 id="6">6. Verification in order</h3>
*
* <pre>
* // A. Single mock whose methods must be invoked in a particular order
* List singleMock = mock(List.class);
*
* //using a single mock
* singleMock.add("was added first");
* singleMock.add("was added second");
*
* //create an inOrder verifier for a single mock
* InOrder inOrder = inOrder(singleMock);
*
* //following will make sure that add is first called with "was added first, then with "was added second"
* inOrder.verify(singleMock).add("was added first");
* inOrder.verify(singleMock).add("was added second");
*
* // B. Multiple mocks that must be used in a particular order
* List firstMock = mock(List.class);
* List secondMock = mock(List.class);
*
* //using mocks
* firstMock.add("was called first");
* secondMock.add("was called second");
*
* //create inOrder object passing any mocks that need to be verified in order
* InOrder inOrder = inOrder(firstMock, secondMock);
*
* //following will make sure that firstMock was called before secondMock
* inOrder.verify(firstMock).add("was called first");
* inOrder.verify(secondMock).add("was called second");
*
* // Oh, and A + B can be mixed together at will
* </pre>
*
* Verification in order is flexible - <b>you don't have to verify all
* interactions</b> one-by-one but only those that you are interested in
* testing in order.
* <p>
* Also, you can create InOrder object passing only mocks that are relevant for
* in-order verification.
*
* <h3 id="7">7. Making sure interaction(s) never happened on mock</h3>
*
* <pre>
* //using mocks - only mockOne is interacted
* mockOne.add("one");
*
* //ordinary verification
* verify(mockOne).add("one");
*
* //verify that method was never called on a mock
* verify(mockOne, never()).add("two");
*
* //verify that other mocks were not interacted
* verifyZeroInteractions(mockTwo, mockThree);
*
* </pre>
*
* <h3 id="8">8. Finding redundant invocations</h3>
*
* <pre>
* //using mocks
* mockedList.add("one");
* mockedList.add("two");
*
* verify(mockedList).add("one");
*
* //following verification will fail
* verifyNoMoreInteractions(mockedList);
* </pre>
*
* A word of <b>warning</b>:
* Some users who did a lot of classic, expect-run-verify mocking tend to use verifyNoMoreInteractions() very often, even in every test method.
* verifyNoMoreInteractions() is not recommended to use in every test method.
* verifyNoMoreInteractions() is a handy assertion from the interaction testing toolkit. Use it only when it's relevant.
* Abusing it leads to overspecified, less maintainable tests. You can find further reading
* <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>.
*
* <p>
* See also {@link Mockito#never()} - it is more explicit and
* communicates the intent well.
* <p>
*
* <h3 id="9">9. Shorthand for mocks creation - @Mock annotation</h3>
*
* <ul>
* <li>Minimizes repetitive mock creation code.</li>
* <li>Makes the test class more readable.</li>
* <li>Makes the verification error easier to read because the <b>field name</b>
* is used to identify the mock.</li>
* </ul>
*
* <pre>
* public class ArticleManagerTest {
*
* @Mock private ArticleCalculator calculator;
* @Mock private ArticleDatabase database;
* @Mock private UserProvider userProvider;
*
* private ArticleManager manager;
* </pre>
*
* <b>Important!</b> This needs to be somewhere in the base class or a test
* runner:
*
* <pre>
* MockitoAnnotations.initMocks(testClass);
* </pre>
*
* You can use built-in runner: {@link MockitoJUnitRunner}.
* <p>
* Read more here: {@link MockitoAnnotations}
*
* <h3 id="10"> 10. Stubbing consecutive calls (iterator-style stubbing)</h3>
*
* Sometimes we need to stub with different return value/exception for the same
* method call. Typical use case could be mocking iterators.
* Original version of Mockito did not have this feature to promote simple mocking.
* For example, instead of iterators one could use {@link Iterable} or simply
* collections. Those offer natural ways of stubbing (e.g. using real
* collections). In rare scenarios stubbing consecutive calls could be useful,
* though:
* <p>
*
* <pre>
* when(mock.someMethod("some arg"))
* .thenThrow(new RuntimeException())
* .thenReturn("foo");
*
* //First call: throws runtime exception:
* mock.someMethod("some arg");
*
* //Second call: prints "foo"
* System.out.println(mock.someMethod("some arg"));
*
* //Any consecutive call: prints "foo" as well (last stubbing wins).
* System.out.println(mock.someMethod("some arg"));
* </pre>
*
* Alternative, shorter version of consecutive stubbing:
*
* <pre>
* when(mock.someMethod("some arg"))
* .thenReturn("one", "two", "three");
* </pre>
*
* <h3 id="11"> 11. Stubbing with callbacks</h3>
*
* Allows stubbing with generic {@link Answer} interface.
* <p>
* Yet another controversial feature which was not included in Mockito
* originally. We recommend using simple stubbing with thenReturn() or
* thenThrow() only. Those two should be <b>just enough</b> to test/test-drive
* any clean & simple code.
*
* <pre>
* when(mock.someMethod(anyString())).thenAnswer(new Answer() {
* Object answer(InvocationOnMock invocation) {
* Object[] args = invocation.getArguments();
* Object mock = invocation.getMock();
* return "called with arguments: " + args;
* }
* });
*
* //Following prints "called with arguments: foo"
* System.out.println(mock.someMethod("foo"));
* </pre>
*
* <h3 id="12"> 12. doThrow()|doAnswer()|doNothing()|doReturn() family of methods for stubbing voids (mostly)</h3>
*
* Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets...
* <p>
* {@link Mockito#doThrow(Throwable)} replaces the {@link Mockito#stubVoid(Object)} method for stubbing voids.
* The main reason is improved readability and consistency with the family of doAnswer() methods.
* <p>
* Use doThrow() when you want to stub a void method with an exception:
* <pre>
* doThrow(new RuntimeException()).when(mockedList).clear();
*
* //following throws RuntimeException:
* mockedList.clear();
* </pre>
*
* Read more about other methods:
* <p>
* {@link Mockito#doThrow(Throwable)}
* <p>
* {@link Mockito#doAnswer(Answer)}
* <p>
* {@link Mockito#doNothing()}
* <p>
* {@link Mockito#doReturn(Object)}
*
* <h3 id="13"> 13. Spying on real objects</h3>
*
* You can create spies of real objects. When you use the spy then the <b>real</b> methods are called (unless a method was stubbed).
* <p>
* Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code.
*
* <p>
* Spying on real objects can be associated with "partial mocking" concept.
* <b>Before the release 1.8</b>, Mockito spies were not real partial mocks.
* The reason was we thought partial mock is a code smell.
* At some point we found legitimate use cases for partial mocks
* (3rd party interfaces, interim refactoring of legacy code, the full article is <a href=
* "http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring"
* >here</a>)
* <p>
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //optionally, you can stub out some methods:
* when(spy.size()).thenReturn(100);
*
* //using the spy calls <b>real</b> methods
* spy.add("one");
* spy.add("two");
*
* //prints "one" - the first element of a list
* System.out.println(spy.get(0));
*
* //size() method was stubbed - 100 is printed
* System.out.println(spy.size());
*
* //optionally, you can verify
* verify(spy).add("one");
* verify(spy).add("two");
* </pre>
*
* <h4>Important gotcha on spying real objects!</h4>
*
* 1. Sometimes it's impossible to use {@link Mockito#when(Object)} for stubbing spies. Example:
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(spy.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing
* doReturn("foo").when(spy).get(0);
* </pre>
*
* 2. Watch out for final methods.
* Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble.
* What will happen is the real method will be called *on mock* but *not on the real instance* you passed to the spy() method.
* Typically you may get a NullPointerException because mock instances don't have fields initiated.
*
* <h3 id="14">14. Changing default return values of unstubbed invocations (Since 1.7) </h3>
*
* You can create a mock with specified strategy for its return values.
* It's quite advanced feature and typically you don't need it to write decent tests.
* However, it can be helpful for working with <b>legacy systems</b>.
* <p>
* It is the default answer so it will be used <b>only when you don't</b> stub the method call.
*
* <pre>
* Foo mock = mock(Foo.class, Mockito.RETURNS_SMART_NULLS);
* Foo mockTwo = mock(Foo.class, new YourOwnAnswer());
* </pre>
*
* <p>
* Read more about this interesting implementation of <i>Answer</i>: {@link Mockito#RETURNS_SMART_NULLS}
*
* <h3 id="15">15. Capturing arguments for further assertions (Since 1.8.0) </h3>
*
* Mockito verifies argument values in natural java style: by using an equals() method.
* This is also the recommended way of matching arguments because it makes tests clean & simple.
* In some situations though, it is helpful to assert on certain arguments after the actual verification.
* For example:
* <pre>
* ArgumentCaptor<Person> argument = ArgumentCaptor.forClass(Person.class);
* verify(mock).doSomething(argument.capture());
* assertEquals("John", argument.getValue().getName());
* </pre>
*
* <b>Warning:</b> it is recommended to use ArgumentCaptor with verification <b>but not</b> with stubbing.
* Using ArgumentCaptor with stubbing may decrease test readability because captor is created outside of assert (aka verify or 'then') block.
* Also it may reduce defect localization because if stubbed method was not called then no argument is captured.
* <p>
* In a way ArgumentCaptor is related to custom argument matchers (see javadoc for {@link ArgumentMatcher} class).
 * Both techniques can be used for making sure certain arguments were passed to mocks.
* However, ArgumentCaptor may be a better fit if:
* <ul>
* <li>custom argument matcher is not likely to be reused</li>
* <li>you just need it to assert on argument values to complete verification</li>
* </ul>
* Custom argument matchers via {@link ArgumentMatcher} are usually better for stubbing.
*
* <h3 id="16">16. Real partial mocks (Since 1.8.0) </h3>
*
* Finally, after many internal debates & discussions on the mailing list, partial mock support was added to Mockito.
* Previously we considered partial mocks as code smells. However, we found a legitimate use case for partial mocks - more reading:
* <a href="http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring">here</a>
* <p>
* <b>Before release 1.8</b> spy() was not producing real partial mocks and it was confusing for some users.
* Read more about spying: <a href="#13">here</a> or in javadoc for {@link Mockito#spy(Object)} method.
* <p>
* <pre>
* //you can create partial mock with spy() method:
* List list = spy(new LinkedList());
*
* //you can enable partial mock capabilities selectively on mocks:
* Foo mock = mock(Foo.class);
* //Be sure the real implementation is 'safe'.
* //If real implementation throws exceptions or depends on specific state of the object then you're in trouble.
* when(mock.someMethod()).thenCallRealMethod();
* </pre>
*
* As usual you are going to read <b>the partial mock warning</b>:
 * Object oriented programming is more or less about tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
*
* <h3 id="17">17. Resetting mocks (Since 1.8.0) </h3>
*
* Smart Mockito users hardly use this feature because they know it could be a sign of poor tests.
* Normally, you don't need to reset your mocks, just create new mocks for each test method.
* <p>
* Instead of reset() please consider writing simple, small and focused test methods over lengthy, over-specified tests.
* <b>First potential code smell is reset() in the middle of the test method.</b> This probably means you're testing too much.
* Follow the whisper of your test methods: "Please keep us small & focused on single behavior".
* There are several threads about it on mockito mailing list.
* <p>
* The only reason we added reset() method is to
* make it possible to work with container-injected mocks.
* See issue 55 (<a href="http://code.google.com/p/mockito/issues/detail?id=55">here</a>)
* or FAQ (<a href="http://code.google.com/p/mockito/wiki/FAQ">here</a>).
* <p>
* <b>Don't harm yourself.</b> reset() in the middle of the test method is a code smell (you're probably testing too much).
* <pre>
* List mock = mock(List.class);
* when(mock.size()).thenReturn(10);
* mock.add(1);
*
* reset(mock);
* //at this point the mock forgot any interactions & stubbing
* </pre>
*
* <h3 id="18">18. Troubleshooting & validating framework usage (Since 1.8.0) </h3>
*
* First of all, in case of any trouble, I encourage you to read the Mockito FAQ:
* <a href="http://code.google.com/p/mockito/wiki/FAQ">http://code.google.com/p/mockito/wiki/FAQ</a>
* <p>
* In case of questions you may also post to mockito mailing list:
* <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a>
* <p>
* Next, you should know that Mockito validates if you use it correctly <b>all the time</b>.
* However, there's a gotcha so please read the javadoc for {@link Mockito#validateMockitoUsage()}
*
* <h3 id="19">19. Aliases for behavior driven development (Since 1.8.0) </h3>
*
* Behavior Driven Development style of writing tests uses <b>//given //when //then</b> comments as fundamental parts of your test methods.
* This is exactly how we write our tests and we warmly encourage you to do so!
* <p>
* Start learning about BDD here: <a href="http://en.wikipedia.org/wiki/Behavior_Driven_Development">http://en.wikipedia.org/wiki/Behavior_Driven_Development</a>
* <p>
* The problem is that current stubbing api with canonical role of <b>when</b> word does not integrate nicely with <b>//given //when //then</b> comments.
* It's because stubbing belongs to <b>given</b> component of the test and not to the <b>when</b> component of the test.
* Hence {@link BDDMockito} class introduces an alias so that you stub method calls with {@link BDDMockito#given(Object)} method.
* Now it really nicely integrates with the <b>given</b> component of a BDD style test!
* <p>
* Here is how the test might look like:
* <pre>
* import static org.mockito.BDDMockito.*;
*
* Seller seller = mock(Seller.class);
* Shop shop = new Shop(seller);
*
* public void shouldBuyBread() throws Exception {
* //given
* given(seller.askForBread()).willReturn(new Bread());
*
* //when
* Goods goods = shop.buyBread();
*
* //then
* assertThat(goods, containBread());
* }
* </pre>
*
* <h3 id="20">20. (**New**) Serializable mocks (Since 1.8.1) </h3>
*
* Mocks can be made serializable. With this feature you can use a mock in a place that requires dependencies to be serializable.
* <p>
* WARNING: This should be rarely used in unit testing.
* <p>
* The behaviour was implemented for a specific use case of a BDD spec that had an unreliable external dependency. This
* was in a web environment and the objects from the external dependency were being serialized to pass between layers.
* <p>
* To create serializable mock use {@link MockSettings#serializable()}:
* <pre>
* List serializableMock = mock(List.class, withSettings().serializable());
* </pre>
* <p>
* The mock can be serialized assuming all the normal <a href='http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html'>
* serialization requirements</a> are met by the class.
* <p>
* Making a real object spy serializable is a bit more effort as the spy(...) method does not have an overloaded version
* which accepts MockSettings. No worries, you will hardly ever use it.
*
* <pre>
* List<Object> list = new ArrayList<Object>();
* List<Object> spy = mock(ArrayList.class, withSettings()
* .spiedInstance(list)
* .defaultAnswer(CALLS_REAL_METHODS)
* .serializable());
* </pre>
*
* <h3 id="21">21. (**New**) New annotations: @Captor, @Spy, @InjectMocks (Since 1.8.3) </h3>
* <p>
* Release 1.8.3 brings new annotations that may be helpful on occasion:
*
* <ul>
* <li>@{@link Captor} simplifies creation of {@link ArgumentCaptor}
* - useful when the argument to capture is a nasty generic class and you want to avoid compiler warnings
* <li>@{@link Spy} - you can use it instead {@link Mockito#spy(Object)}.
* <li>@{@link InjectMocks} - injects mocks into tested object automatically.
* </ul>
* <p>
* All new annotations are *only* processed on {@link MockitoAnnotations#initMocks(Object)}
* <p>
* <h3 id="22">22. (**New**) Verification with timeout (Since 1.8.5) </h3>
* <p>
* Allows verifying with timeout. May be useful for testing in concurrent conditions.
* <p>
* It feels this feature should be used rarely - figure out a better way of testing your multi-threaded system.
* <p>
* Not yet implemented to work with InOrder verification.
* <p>
* Examples:
* <p>
* <pre>
* //passes when someMethod() is called within given time span
* verify(mock, timeout(100)).someMethod();
* //above is an alias to:
* verify(mock, timeout(100).times(1)).someMethod();
*
* //passes when someMethod() is called *exactly* 2 times within given time span
* verify(mock, timeout(100).times(2)).someMethod();
*
 * //passes when someMethod() is called *at least* 2 times within given time span
* verify(mock, timeout(100).atLeast(2)).someMethod();
*
* //verifies someMethod() within given time span using given verification mode
* //useful only if you have your own custom verification modes.
* verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod();
* </pre>
*/
@SuppressWarnings("unchecked")
public class Mockito extends Matchers {
static final MockitoCore MOCKITO_CORE = new MockitoCore();
/**
* The default Answer of every mock <b>if</b> the mock was not stubbed.
* Typically it just returns some empty value.
* <p>
* {@link Answer} can be used to define the return values of unstubbed invocations.
* <p>
* This implementation first tries the global configuration.
* If there is no global configuration then it uses {@link ReturnsEmptyValues} (returns zeros, empty collections, nulls, etc.)
*/
public static final Answer<Object> RETURNS_DEFAULTS = Answers.RETURNS_DEFAULTS.get();
/**
* Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
* <p>
* {@link Answer} can be used to define the return values of unstubbed invocations.
* <p>
* This implementation can be helpful when working with legacy code.
* Unstubbed methods often return null. If your code uses the object returned by an unstubbed call you get a NullPointerException.
* This implementation of Answer <b>returns SmartNull instead of null</b>.
* SmartNull gives nicer exception message than NPE because it points out the line where unstubbed method was called. You just click on the stack trace.
* <p>
* ReturnsSmartNulls first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues})
* then it tries to return SmartNull. If the return type is final then plain null is returned.
* <p>
* ReturnsSmartNulls will be probably the default return values strategy in Mockito 2.0
* <p>
* Example:
* <pre>
* Foo mock = (Foo.class, RETURNS_SMART_NULLS);
*
* //calling unstubbed method here:
* Stuff stuff = mock.getStuff();
*
* //using object returned by unstubbed call:
* stuff.doSomething();
*
* //Above doesn't yield NullPointerException this time!
* //Instead, SmartNullPointerException is thrown.
* //Exception's cause links to unstubbed <i>mock.getStuff()</i> - just click on the stack trace.
* </pre>
*/
public static final Answer<Object> RETURNS_SMART_NULLS = Answers.RETURNS_SMART_NULLS.get();
/**
* Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
* <p>
* {@link Answer} can be used to define the return values of unstubbed invocations.
* <p>
* This implementation can be helpful when working with legacy code.
* <p>
* ReturnsMocks first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues})
* then it tries to return mocks. If the return type cannot be mocked (e.g. is final) then plain null is returned.
* <p>
*/
public static final Answer<Object> RETURNS_MOCKS = Answers.RETURNS_MOCKS.get();
/**
* Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
* <p>
* Example that shows how deep stub works:
* <pre>
* Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS);
*
* // note that we're stubbing a chain of methods here: getBar().getName()
* when(mock.getBar().getName()).thenReturn("deep");
*
* // note that we're chaining method calls: getBar().getName()
* assertEquals("deep", mock.getBar().getName());
* </pre>
*
* <strong>Verification API does not support 'chaining'</strong> so deep stub doesn't change how you do verification.
* <p>
* <strong>WARNING: </strong>
* This feature should rarely be required for regular clean code! Leave it for legacy code.
* Mocking a mock to return a mock, to return a mock, (...), to return something meaningful
* hints at violation of Law of Demeter or mocking a value object (a well known anti-pattern).
* <p>
* Good quote I've seen one day on the web: <strong>every time a mock returns a mock a fairy dies</strong>.
* <p>
* How deep stub work internally?
* <pre>
* //this:
* Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS);
* when(mock.getBar().getName(), "deep");
*
* //is equivalent of
* Foo foo = mock(Foo.class);
* Bar bar = mock(Bar.class);
* when(foo.getBar()).thenReturn(bar);
* when(bar.getName()).thenReturn("deep");
* </pre>
* <p>
* This feature will not work when any return type of methods included in the chain cannot be mocked
* (for example: is a primitive or a final class). This is because of java type system.
*/
public static final Answer<Object> RETURNS_DEEP_STUBS = Answers.RETURNS_DEEP_STUBS.get();
/**
* Optional Answer to be used with {@link Mockito#mock(Class, Answer)}
* <p>
* {@link Answer} can be used to define the return values of unstubbed invocations.
* <p>
* This implementation can be helpful when working with legacy code.
* When this implementation is used, unstubbed methods will delegate to the real implementation.
* This is a way to create a partial mock object that calls real methods by default.
* <p>
* As usual you are going to read <b>the partial mock warning</b>:
* Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
* <p>
* Example:
* <pre>
* Foo mock = mock(Foo.class, CALLS_REAL_METHODS);
*
* // this calls the real implementation of Foo.getSomething()
* value = mock.getSomething();
*
* when(mock.getSomething()).thenReturn(fakeValue);
*
* // now fakeValue is returned
* value = mock.getSomething();
* </pre>
*/
public static final Answer<Object> CALLS_REAL_METHODS = Answers.CALLS_REAL_METHODS.get();
/**
* Creates mock object of given class or interface.
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param classToMock class or interface to mock
* @return mock object
*/
public static <T> T mock(Class<T> classToMock) {
return mock(classToMock, withSettings().defaultAnswer(RETURNS_DEFAULTS));
}
/**
* Specifies mock name. Naming mocks can be helpful for debugging - the name is used in all verification errors.
* <p>
* Beware that naming mocks is not a solution for complex code which uses too many mocks or collaborators.
* <b>If you have too many mocks then refactor the code</b> so that it's easy to test/debug without necessity of naming mocks.
* <p>
* <b>If you use @Mock annotation then you've got naming mocks for free!</b> @Mock uses field name as mock name. {@link Mock Read more.}
* <p>
*
* See examples in javadoc for {@link Mockito} class
*
* @param classToMock class or interface to mock
* @param name of the mock
* @return mock object
*/
public static <T> T mock(Class<T> classToMock, String name) {
return mock(classToMock, withSettings()
.name(name)
.defaultAnswer(RETURNS_DEFAULTS));
}
/**
* @deprecated
* <b>Please use mock(Foo.class, defaultAnswer);</b>
* <p>
* See {@link Mockito#mock(Class, Answer)}
* <p>
* Why it is deprecated? ReturnValues is being replaced by Answer
* for better consistency & interoperability of the framework.
* Answer interface has been in Mockito for a while and it has the same responsibility as ReturnValues.
* There's no point in mainting exactly the same interfaces.
* <p>
* Creates mock with a specified strategy for its return values.
* It's quite advanced feature and typically you don't need it to write decent tests.
* However it can be helpful when working with legacy systems.
* <p>
* Obviously return values are used only when you don't stub the method call.
*
* <pre>
* Foo mock = mock(Foo.class, Mockito.RETURNS_SMART_NULLS);
* Foo mockTwo = mock(Foo.class, new YourOwnReturnValues());
* </pre>
*
* <p>See examples in javadoc for {@link Mockito} class</p>
*
* @param classToMock class or interface to mock
* @param returnValues default return values for unstubbed methods
*
* @return mock object
*/
@Deprecated
public static <T> T mock(Class<T> classToMock, ReturnValues returnValues) {
return mock(classToMock, withSettings().defaultAnswer(new AnswerReturnValuesAdapter(returnValues)));
}
/**
* Creates mock with a specified strategy for its answers to interactions.
* It's quite advanced feature and typically you don't need it to write decent tests.
* However it can be helpful when working with legacy systems.
* <p>
* It is the default answer so it will be used <b>only when you don't</b> stub the method call.
*
* <pre>
* Foo mock = mock(Foo.class, RETURNS_SMART_NULLS);
* Foo mockTwo = mock(Foo.class, new YourOwnAnswer());
* </pre>
*
* <p>See examples in javadoc for {@link Mockito} class</p>
*
* @param classToMock class or interface to mock
* @param defaultAnswer default answer for unstubbed methods
*
* @return mock object
*/
public static <T> T mock(Class<T> classToMock, Answer defaultAnswer) {
return mock(classToMock, withSettings().defaultAnswer(defaultAnswer));
}
/**
* Creates a mock with some non-standard settings.
* <p>
* The number of configuration points for a mock grows
* so we need a fluent way to introduce new configuration without adding more and more overloaded Mockito.mock() methods.
* Hence {@link MockSettings}.
* <pre>
* Listener mock = mock(Listener.class, withSettings()
* .name("firstListner").defaultBehavior(RETURNS_SMART_NULLS));
* );
* </pre>
* <b>Use it carefully and occasionally</b>. What might be reason your test needs non-standard mocks?
* Is the code under test so complicated that it requires non-standard mocks?
* Wouldn't you prefer to refactor the code under test so it is testable in a simple way?
* <p>
* See also {@link Mockito#withSettings()}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param classToMock class or interface to mock
* @param mockSettings additional mock settings
* @return mock object
*/
public static <T> T mock(Class<T> classToMock, MockSettings mockSettings) {
return MOCKITO_CORE.mock(classToMock, mockSettings);
}
/**
* Creates a spy of the real object. The spy calls <b>real</b> methods unless they are stubbed.
* <p>
* Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code.
* <p>
* As usual you are going to read <b>the partial mock warning</b>:
* Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
* <p>
* Example:
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //optionally, you can stub out some methods:
* when(spy.size()).thenReturn(100);
*
* //using the spy calls <b>real</b> methods
* spy.add("one");
* spy.add("two");
*
* //prints "one" - the first element of a list
* System.out.println(spy.get(0));
*
* //size() method was stubbed - 100 is printed
* System.out.println(spy.size());
*
* //optionally, you can verify
* verify(spy).add("one");
* verify(spy).add("two");
* </pre>
*
* <h4>Important gotcha on spying real objects!</h4>
*
* 1. Sometimes it's impossible to use {@link Mockito#when(Object)} for stubbing spies. Example:
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(spy.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing
* doReturn("foo").when(spy).get(0);
* </pre>
*
* 2. Watch out for final methods.
* Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble.
* What will happen is the real method will be called *on mock* but *not on the real instance* you passed to the spy() method.
* Typically you may get a NullPointerException because mock instances don't have fields initiated.
*
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param object
* to spy on
* @return a spy of the real object
*/
public static <T> T spy(T object) {
return MOCKITO_CORE.mock((Class<T>) object.getClass(), withSettings()
.spiedInstance(object)
.defaultAnswer(CALLS_REAL_METHODS));
}
/**
* Stubs a method call with return value or an exception. E.g:
*
* <pre>
* stub(mock.someMethod()).toReturn(10);
*
* //you can use flexible argument matchers, e.g:
* stub(mock.someMethod(<b>anyString()</b>)).toReturn(10);
*
* //setting exception to be thrown:
* stub(mock.someMethod("some arg")).toThrow(new RuntimeException());
*
* //you can stub with different behavior for consecutive method calls.
* //Last stubbing (e.g: toReturn("foo")) determines the behavior for further consecutive calls.
* stub(mock.someMethod("some arg"))
* .toThrow(new RuntimeException())
* .toReturn("foo");
* </pre>
* <p>
* Some users find stub() confusing therefore {@link Mockito#when(Object)} is recommended over stub()
* <pre>
* //Instead of:
* stub(mock.count()).toReturn(10);
*
* //You can do:
* when(mock.count()).thenReturn(10);
* </pre>
* For stubbing void methods with throwables see: {@link Mockito#doThrow(Throwable)}
* <p>
* Stubbing can be overridden: for example common stubbing can go to fixture
* setup but the test methods can override it.
* Please note that overridding stubbing is a potential code smell that points out too much stubbing.
* <p>
* Once stubbed, the method will always return stubbed value regardless
* of how many times it is called.
* <p>
* Last stubbing is more important - when you stubbed the same method with
* the same arguments many times.
* <p>
* Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
* Let's say you've stubbed foo.bar().
* If your code cares what foo.bar() returns then something else breaks(often before even verify() gets executed).
* If your code doesn't care what get(0) returns then it should not be stubbed.
* Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
*
* @param methodCall
* method call
* @return DeprecatedOngoingStubbing object to set stubbed value/exception
*/
public static <T> DeprecatedOngoingStubbing<T> stub(T methodCall) {
return MOCKITO_CORE.stub(methodCall);
}
/**
* Enables stubbing methods. Use it when you want the mock to return particular value when particular method is called.
* <p>
* Simply put: "<b>When</b> the x method is called <b>then</b> return y".
* <p>
* <b>when() is a successor of deprecated {@link Mockito#stub(Object)}</b>
* <p>
* Examples:
*
* <pre>
* <b>when</b>(mock.someMethod()).<b>thenReturn</b>(10);
*
* //you can use flexible argument matchers, e.g:
* when(mock.someMethod(<b>anyString()</b>)).thenReturn(10);
*
* //setting exception to be thrown:
* when(mock.someMethod("some arg")).thenThrow(new RuntimeException());
*
* //you can set different behavior for consecutive method calls.
* //Last stubbing (e.g: thenReturn("foo")) determines the behavior of further consecutive calls.
* when(mock.someMethod("some arg"))
* .thenThrow(new RuntimeException())
* .thenReturn("foo");
*
* //Alternative, shorter version for consecutive stubbing:
* when(mock.someMethod("some arg"))
* .thenReturn("one", "two");
* //is the same as:
* when(mock.someMethod("some arg"))
* .thenReturn("one")
* .thenReturn("two");
*
* //shorter version for consecutive method calls throwing exceptions:
* when(mock.someMethod("some arg"))
* .thenThrow(new RuntimeException(), new NullPointerException();
*
* </pre>
*
* For stubbing void methods with throwables see: {@link Mockito#doThrow(Throwable)}
* <p>
* Stubbing can be overridden: for example common stubbing can go to fixture
* setup but the test methods can override it.
* Please note that overridding stubbing is a potential code smell that points out too much stubbing.
* <p>
* Once stubbed, the method will always return stubbed value regardless
* of how many times it is called.
* <p>
* Last stubbing is more important - when you stubbed the same method with
* the same arguments many times.
* <p>
* Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
* Let's say you've stubbed foo.bar().
* If your code cares what foo.bar() returns then something else breaks(often before even verify() gets executed).
* If your code doesn't care what get(0) returns then it should not be stubbed.
* Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
*
* <p>
* See examples in javadoc for {@link Mockito} class
* @param methodCall method to be stubbed
*/
public static <T> OngoingStubbing<T> when(T methodCall) {
return MOCKITO_CORE.when(methodCall);
}
/**
* Verifies certain behavior <b>happened once</b>
* <p>
* Alias to <code>verify(mock, times(1))</code> E.g:
* <pre>
* verify(mock).someMethod("some arg");
* </pre>
* Above is equivalent to:
* <pre>
* verify(mock, times(1)).someMethod("some arg");
* </pre>
* <p>
* Arguments passed are compared using equals() method.
* Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed.
* <p>
* Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
* Let's say you've stubbed foo.bar().
* If your code cares what foo.bar() returns then something else breaks(often before even verify() gets executed).
* If your code doesn't care what get(0) returns then it should not be stubbed.
* Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
*
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param mock to be verified
* @return mock object itself
*/
public static <T> T verify(T mock) {
return MOCKITO_CORE.verify(mock, times(1));
}
/**
* Verifies certain behavior happened at least once / exact number of times / never. E.g:
* <pre>
* verify(mock, times(5)).someMethod("was called five times");
*
* verify(mock, atLeast(2)).someMethod("was called at least two times");
*
* //you can use flexible argument matchers, e.g:
* verify(mock, atLeastOnce()).someMethod(<b>anyString()</b>);
* </pre>
*
* <b>times(1) is the default</b> and can be omitted
* <p>
* Arguments passed are compared using equals() method.
* Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed.
* <p>
*
* @param mock to be verified
* @param mode times(x), atLeastOnce() or never()
*
* @return mock object itself
*/
    public static <T> T verify(T mock, VerificationMode mode) {
        // Delegates to the core with the caller-supplied verification mode
        // (times(x), atLeast(x), never(), ...).
        return MOCKITO_CORE.verify(mock, mode);
    }
/**
* Smart Mockito users hardly use this feature because they know it could be a sign of poor tests.
* Normally, you don't need to reset your mocks, just create new mocks for each test method.
* <p>
* Instead of reset() please consider writing simple, small and focused test methods over lengthy, over-specified tests.
* <b>First potential code smell is reset() in the middle of the test method.</b> This probably means you're testing too much.
* Follow the whisper of your test methods: "Please keep us small & focused on single behavior".
* There are several threads about it on mockito mailing list.
* <p>
* The only reason we added reset() method is to
* make it possible to work with container-injected mocks.
* See issue 55 (<a href="http://code.google.com/p/mockito/issues/detail?id=55">here</a>)
* or FAQ (<a href="http://code.google.com/p/mockito/wiki/FAQ">here</a>).
* <p>
* <b>Don't harm yourself.</b> reset() in the middle of the test method is a code smell (you're probably testing too much).
* <pre>
* List mock = mock(List.class);
* when(mock.size()).thenReturn(10);
* mock.add(1);
*
* reset(mock);
* //at this point the mock forgot any interactions & stubbing
* </pre>
*
* @param <T>
* @param mocks to be reset
*/
    public static <T> void reset(T ... mocks) {
        // Clears all recorded interactions and stubbings on the given mocks.
        MOCKITO_CORE.reset(mocks);
    }
/**
* Checks if any of given mocks has any unverified interaction.
* <p>
* You can use this method after you verified your mocks - to make sure that nothing
* else was invoked on your mocks.
* <p>
* See also {@link Mockito#never()} - it is more explicit and communicates the intent well.
* <p>
* Stubbed invocations (if called) are also treated as interactions.
* <p>
* A word of <b>warning</b>:
* Some users who did a lot of classic, expect-run-verify mocking tend to use verifyNoMoreInteractions() very often, even in every test method.
* verifyNoMoreInteractions() is not recommended to use in every test method.
* verifyNoMoreInteractions() is a handy assertion from the interaction testing toolkit. Use it only when it's relevant.
* Abusing it leads to overspecified, less maintainable tests. You can find further reading
* <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>.
* <p>
* This method will also detect unverified invocations that occurred before the test method,
* for example: in setUp(), @Before method or in constructor.
* Consider writing nice code that makes interactions only in test methods.
*
* <p>
* Example:
*
* <pre>
* //interactions
* mock.doSomething();
* mock.doSomethingUnexpected();
*
* //verification
* verify(mock).doSomething();
*
* //following will fail because 'doSomethingUnexpected()' is unexpected
* verifyNoMoreInteractions(mock);
*
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param mocks to be verified
*/
    public static void verifyNoMoreInteractions(Object... mocks) {
        // Fails if any interaction on the given mocks is still unverified.
        MOCKITO_CORE.verifyNoMoreInteractions(mocks);
    }
/**
* Verifies that no interactions happened on given mocks.
* <pre>
* verifyZeroInteractions(mockOne, mockTwo);
* </pre>
* This method will also detect invocations
* that occurred before the test method, for example: in setUp(), @Before method or in constructor.
* Consider writing nice code that makes interactions only in test methods.
* <p>
* See also {@link Mockito#never()} - it is more explicit and communicates the intent well.
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param mocks to be verified
*/
    public static void verifyZeroInteractions(Object... mocks) {
        // Intentionally the same core call as verifyNoMoreInteractions():
        // with no prior verify() calls, "no more interactions" means
        // "no interactions at all".
        MOCKITO_CORE.verifyNoMoreInteractions(mocks);
    }
/**
* <pre>
* //Instead of:
* stubVoid(mock).toThrow(e).on().someVoidMethod();
*
* //Please do:
* doThrow(e).when(mock).someVoidMethod();
* </pre>
*
* doThrow() replaces stubVoid() because of improved readability and consistency with the family of doAnswer() methods.
* <p>
* Originally, stubVoid() was used for stubbing void methods with exceptions. E.g:
*
* <pre>
* stubVoid(mock).toThrow(new RuntimeException()).on().someMethod();
*
* //you can stub with different behavior for consecutive calls.
* //Last stubbing (e.g. toReturn()) determines the behavior for further consecutive calls.
* stubVoid(mock)
* .toThrow(new RuntimeException())
* .toReturn()
* .on().someMethod();
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @deprecated Use {@link Mockito#doThrow(Throwable)} method for stubbing voids
*
* @param mock
* to stub
* @return stubbable object that allows stubbing with throwable
*/
    public static <T> VoidMethodStubbable<T> stubVoid(T mock) {
        // Deprecated entry point kept for backward compatibility;
        // new code should use doThrow(...).when(mock).someVoidMethod().
        return MOCKITO_CORE.stubVoid(mock);
    }
/**
* Use doThrow() when you want to stub the void method with an exception.
* <p>
* Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets...
* <p>
* Example:
*
* <pre>
* doThrow(new RuntimeException()).when(mock).someVoidMethod();
* </pre>
*
* @param toBeThrown to be thrown when the stubbed method is called
* @return stubber - to select a method for stubbing
*/
public static Stubber doThrow(Throwable toBeThrown) {
return MOCKITO_CORE.doAnswer(new ThrowsException(toBeThrown));
}
/**
* Use doCallRealMethod() when you want to call the real implementation of a method.
* <p>
* As usual you are going to read <b>the partial mock warning</b>:
* Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven & well-designed code.
* <p>
* See also javadoc {@link Mockito#spy(Object)} to find out more about partial mocks.
* <b>Mockito.spy() is a recommended way of creating partial mocks.</b>
* The reason is it guarantees real methods are called against correctly constructed object because you're responsible for constructing the object passed to spy() method.
* <p>
* Example:
* <pre>
* Foo mock = mock(Foo.class);
* doCallRealMethod().when(mock).someVoidMethod();
*
* // this will call the real implementation of Foo.someVoidMethod()
* mock.someVoidMethod();
* </pre>
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return stubber - to select a method for stubbing
*/
public static Stubber doCallRealMethod() {
return MOCKITO_CORE.doAnswer(new CallsRealMethods());
}
/**
* Use doAnswer() when you want to stub a void method with generic {@link Answer}.
* <p>
* Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets...
* <p>
* Example:
*
* <pre>
* doAnswer(new Answer() {
* public Object answer(InvocationOnMock invocation) {
* Object[] args = invocation.getArguments();
* Mock mock = invocation.getMock();
* return null;
* }})
* .when(mock).someMethod();
* </pre>
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param answer to answer when the stubbed method is called
* @return stubber - to select a method for stubbing
*/
    public static Stubber doAnswer(Answer answer) {
        // Entry point of the doAnswer()/doThrow()/doNothing() family:
        // all of them funnel through this core call.
        return MOCKITO_CORE.doAnswer(answer);
    }
/**
* Use doNothing() for setting void methods to do nothing. <b>Beware that void methods on mocks do nothing by default!</b>
* However, there are rare situations when doNothing() comes handy:
* <p>
* 1. Stubbing consecutive calls on a void method:
* <pre>
* doNothing().
* doThrow(new RuntimeException())
* .when(mock).someVoidMethod();
*
* //does nothing the first time:
* mock.someVoidMethod();
*
* //throws RuntimeException the next time:
* mock.someVoidMethod();
* </pre>
*
* 2. When you spy real objects and you want the void method to do nothing:
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //let's make clear() do nothing
* doNothing().when(spy).clear();
*
* spy.add("one");
*
* //clear() does nothing, so the list still contains "one"
* spy.clear();
* </pre>
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return stubber - to select a method for stubbing
*/
public static Stubber doNothing() {
return MOCKITO_CORE.doAnswer(new DoesNothing());
}
/**
* Use doReturn() in those rare occasions when you cannot use {@link Mockito#when(Object)}.
* <p>
* <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe
* and more readable</b> (especially when stubbing consecutive calls).
* <p>
* Here are those rare occasions when doReturn() comes handy:
* <p>
*
* 1. When spying real objects and calling real methods on a spy brings side effects
*
* <pre>
* List list = new LinkedList();
* List spy = spy(list);
*
* //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(spy.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing:
* doReturn("foo").when(spy).get(0);
* </pre>
*
* 2. Overriding a previous exception-stubbing:
*
* <pre>
* when(mock.foo()).thenThrow(new RuntimeException());
*
* //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown.
* when(mock.foo()).thenReturn("bar");
*
* //You have to use doReturn() for stubbing:
* doReturn("bar").when(mock).foo();
* </pre>
*
* Above scenarios shows a tradeoff of Mockito's ellegant syntax. Note that the scenarios are very rare, though.
* Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general
* overridding stubbing is a potential code smell that points out too much stubbing.
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param toBeReturned to be returned when the stubbed method is called
* @return stubber - to select a method for stubbing
*/
public static Stubber doReturn(Object toBeReturned) {
return MOCKITO_CORE.doAnswer(new Returns(toBeReturned));
}
/**
* Creates InOrder object that allows verifying mocks in order.
*
* <pre>
* InOrder inOrder = inOrder(firstMock, secondMock);
*
* inOrder.verify(firstMock).add("was called first");
* inOrder.verify(secondMock).add("was called second");
* </pre>
*
* Verification in order is flexible - <b>you don't have to verify all interactions</b> one-by-one
* but only those that you are interested in testing in order.
* <p>
* Also, you can create InOrder object passing only mocks that are relevant for in-order verification.
* <p>
* InOrder verification is 'greedy'. You will hardly every notice it but
* if you want to find out more search for 'greedy' on the Mockito
* <a href="http://code.google.com/p/mockito/w/list">wiki pages</a>.
* <p>
* As of Mockito 1.8.4 you can verifyNoMoreInvocations() in order-sensitive way. Read more: {@link InOrder#verifyNoMoreInteractions()}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @param mocks to be verified in order
*
* @return InOrder object to be used to verify in order
*/
    public static InOrder inOrder(Object... mocks) {
        // Builds an InOrder verifier restricted to the given mocks.
        return MOCKITO_CORE.inOrder(mocks);
    }
/**
* Allows verifying exact number of invocations. E.g:
* <pre>
* verify(mock, times(2)).someMethod("some arg");
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param wantedNumberOfInvocations wanted number of invocations
*
* @return verification mode
*/
    public static VerificationMode times(int wantedNumberOfInvocations) {
        // Exact-count verification mode.
        return VerificationModeFactory.times(wantedNumberOfInvocations);
    }
/**
* Alias to times(0), see {@link Mockito#times(int)}
* <p>
* Verifies that interaction did not happen. E.g:
* <pre>
* verify(mock, never()).someMethod();
* </pre>
*
* <p>
* If you want to verify there were NO interactions with the mock
* check out {@link Mockito#verifyZeroInteractions(Object...)}
* or {@link Mockito#verifyNoMoreInteractions(Object...)}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return verification mode
*/
    public static VerificationMode never() {
        // "Never" is simply an exact count of zero.
        return times(0);
    }
/**
* Allows at-least-once verification. E.g:
* <pre>
* verify(mock, atLeastOnce()).someMethod("some arg");
* </pre>
* Alias to atLeast(1)
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return verification mode
*/
    public static VerificationMode atLeastOnce() {
        // Equivalent to atLeast(1).
        return VerificationModeFactory.atLeastOnce();
    }
/**
* Allows at-least-x verification. E.g:
* <pre>
* verify(mock, atLeast(3)).someMethod("some arg");
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param minNumberOfInvocations minimum number of invocations
*
* @return verification mode
*/
    public static VerificationMode atLeast(int minNumberOfInvocations) {
        // Lower-bounded verification mode.
        return VerificationModeFactory.atLeast(minNumberOfInvocations);
    }
/**
* Allows at-most-x verification. E.g:
* <pre>
* verify(mock, atMost(3)).someMethod("some arg");
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param maxNumberOfInvocations max number of invocations
*
* @return verification mode
*/
    public static VerificationMode atMost(int maxNumberOfInvocations) {
        // Upper-bounded verification mode.
        return VerificationModeFactory.atMost(maxNumberOfInvocations);
    }
/**
* Allows checking if given method was the only one invoked. E.g:
* <pre>
* verify(mock, only()).someMethod();
* //above is a shorthand for following 2 lines of code:
* verify(mock).someMethod();
* verifyNoMoreInvocations(mock);
* </pre>
*
* <p>
* See also {@link Mockito#verifyNoMoreInteractions(Object...)}
* <p>
* See examples in javadoc for {@link Mockito} class
*
* @return verification mode
*/
    //TODO make exception message nicer
    public static VerificationMode only() {
        // Verifies the method was invoked AND that it was the sole interaction.
        return VerificationModeFactory.only();
    }
/**
* Allows verifying with timeout. May be useful for testing in concurrent conditions.
* <p>
* It feels this feature should be used rarely - figure out a better way of testing your multi-threaded system
* <p>
* Not yet implemented to work with InOrder verification.
* <pre>
* //passes when someMethod() is called within given time span
* verify(mock, timeout(100)).someMethod();
* //above is an alias to:
* verify(mock, timeout(100).times(1)).someMethod();
*
* //passes when someMethod() is called *exactly* 2 times within given time span
* verify(mock, timeout(100).times(2)).someMethod();
*
* //passes when someMethod() is called *at lest* 2 times within given time span
* verify(mock, timeout(100).atLeast(2)).someMethod();
*
* //verifies someMethod() within given time span using given verification mode
* //useful only if you have your own custom verification modes.
* verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod();
* </pre>
*
* See examples in javadoc for {@link Mockito} class
*
* @param millis - time span in millis
*
* @return verification mode
*/
public static VerificationWithTimeout timeout(int millis) {
return new Timeout(millis, VerificationModeFactory.times(1));
}
/**
* First of all, in case of any trouble, I encourage you to read the Mockito FAQ: <a href="http://code.google.com/p/mockito/wiki/FAQ">http://code.google.com/p/mockito/wiki/FAQ</a>
* <p>
* In case of questions you may also post to mockito mailing list: <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a>
* <p>
* validateMockitoUsage() <b>explicitly validates</b> the framework state to detect invalid use of Mockito.
* However, this feature is optional <b>because Mockito validates the usage all the time...</b> but there is a gotcha so read on.
* <p>
* Examples of incorrect use:
* <pre>
* //Oups, someone forgot thenReturn() part:
* when(mock.get());
*
* //Oups, someone put the verified method call inside verify() where it should be outside:
* verify(mock.execute());
*
* //Oups, someone has used EasyMock for too long and forgot to specify the method to verify:
* verify(mock);
* </pre>
*
* Mockito throws exceptions if you misuse it so that you know if your tests are written correctly.
* The gotcha is that Mockito does the validation <b>next time</b> you use the framework (e.g. next time you verify, stub, call mock etc.).
* But even though the exception might be thrown in the next test,
* the exception <b>message contains a navigable stack trace element</b> with location of the defect.
* Hence you can click and find the place where Mockito was misused.
* <p>
* Sometimes though, you might want to validate the framework usage explicitly.
* For example, one of the users wanted to put validateMockitoUsage() in his @After method
* so that he knows immediately when he misused Mockito.
* Without it, he would have known about it not sooner than <b>next time</b> he used the framework.
* One more benefit of having validateMockitoUsage() in @After is that jUnit runner will always fail in the test method with defect
* whereas ordinary 'next-time' validation might fail the <b>next</b> test method.
* But even though JUnit might report next test as red, don't worry about it
* and just click at navigable stack trace element in the exception message to instantly locate the place where you misused mockito.
* <p>
* <b>Built-in runner: {@link MockitoJUnitRunner}</b> does validateMockitoUsage() after each test method.
* <p>
* Bear in mind that <b>usually you don't have to validateMockitoUsage()</b>
* and framework validation triggered on next-time basis should be just enough,
* mainly because of enhanced exception message with clickable location of defect.
* However, I would recommend validateMockitoUsage() if you already have sufficient test infrastructure
* (like your own runner or base class for all tests) because adding a special action to @After has zero cost.
* <p>
* See examples in javadoc for {@link Mockito} class
*/
    public static void validateMockitoUsage() {
        // Explicitly triggers the framework-state validation that otherwise
        // happens lazily on the next Mockito call.
        MOCKITO_CORE.validateMockitoUsage();
    }
/**
* Allows mock creation with additional mock settings.
* <p>
* Don't use it too often.
* Consider writing simple tests that use simple mocks.
* Repeat after me: simple tests push simple, KISSy, readable & maintainable code.
* If you cannot write a test in a simple way - refactor the code under test.
* <p>
* Examples of mock settings:
* <pre>
* //Creates mock with different default answer & name
* Foo mock = mock(Foo.class, withSettings()
* .defaultAnswer(RETURNS_SMART_NULLS)
* .name("cool mockie"));
*
* //Creates mock with different default answer, descriptive name and extra interfaces
* Foo mock = mock(Foo.class, withSettings()
* .defaultAnswer(RETURNS_SMART_NULLS)
* .name("cool mockie")
* .extraInterfaces(Bar.class));
* </pre>
* {@link MockSettings} has been introduced for two reasons.
* Firstly, to make it easy to add another mock settings when the demand comes.
* Secondly, to enable combining different mock settings without introducing zillions of overloaded mock() methods.
* <p>
* See javadoc for {@link MockSettings} to learn about possible mock settings.
* <p>
*
* @return mock settings instance with defaults.
*/
public static MockSettings withSettings() {
return new MockSettingsImpl().defaultAnswer(RETURNS_DEFAULTS);
}
    /*
     * Helps debugging failing tests. Experimental - use at your own risk.
     * Package-private on purpose: not part of the supported public API.
     */
    @Deprecated
    static MockitoDebugger debug() {
        return new MockitoDebuggerImpl();
    }
}
| Added new Javadoc point (23) and precisions on point 21
| src/org/mockito/Mockito.java | Added new Javadoc point (23) and precisions on point 21 |
|
Java | mit | 4d84c7baeda81240eeeb66cf08a088dfcf27cccc | 0 | Bhek/Final-Year-Project,Bhek/Final-Year-Project,Bhek/Final-Year-Project | package com.example.visionapp;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import com.googlecode.tesseract.android.TessBaseAPI;
import android.app.Activity;
import android.app.Fragment;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
public class ImageActivity extends Activity {
Mat image, yellow, hist;
public static final String DATA_PATH = Environment.getExternalStorageDirectory().toString() + "/VisionBusApp/";
public static final String lang = "eng";
private static final String TAG = "ImageActivity.java";
private static String stopNumber;
@Override
protected void onCreate(Bundle savedInstanceState) {
String[] paths = new String[] {DATA_PATH, DATA_PATH + "tessdata/"};
for (String path: paths) {
File dir = new File(path);
if (!dir.exists()) {
if (!dir.mkdirs()) {
Log.v(TAG, "ERROR: Creation of directory " + path + " on sdcard failed");
return;
}
else {
Log.v(TAG, "Created directory " + path + " on sdcard");
}
}
}
if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata")).exists()) {
try {
AssetManager am = getResources().getAssets();
InputStream is = am.open("tessdata/" + lang + ".traineddata");
OutputStream os = new FileOutputStream(DATA_PATH + "tessData/" + lang + ".traineddata");
byte[] buf = new byte[1024];
int len;
while ((len = is.read(buf)) > 0) {
os.write(buf, 0, len);
}
is.close();
os.close();
Log.v(TAG, "Copied " + lang + ".traineddata");
} catch (IOException e) {
Log.v(TAG, "Was unable to copy eng.traineddata " + e.toString());
}
}
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_image);
if (savedInstanceState == null) {
getFragmentManager().beginTransaction()
.add(R.id.container, new PlaceholderFragment()).commit();
}
}
protected void onResume() {
super.onResume();
try {
processImage();
} catch (Exception e) {
e.printStackTrace();
}
}
public void processImage() throws Exception {
Bitmap bitmap = null;
Bundle extras = getIntent().getExtras();
if (extras != null) {
bitmap = (Bitmap) getIntent().getParcelableExtra("image");
}
Bitmap temp = bitmap.copy(bitmap.getConfig(), true);
FileOutputStream out = null;
String timeStamp = new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss").format(new java.util.Date());
File file = new File(DATA_PATH + timeStamp + ".png");
try {
out = new FileOutputStream(file);
temp.compress(Bitmap.CompressFormat.PNG, 100, out);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (out != null) {
out.close();
}
}
AssetManager asset = getResources().getAssets();
InputStream in = asset.open("sign.jpg");
bitmap = BitmapFactory.decodeStream(in);
Bitmap signBitmap = bitmap.copy(bitmap.getConfig(), true);
ImageView mImageView = (ImageView) findViewById(R.id.cameraResult);
mImageView.setImageBitmap(signBitmap);
image = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC1);
Utils.bitmapToMat(bitmap, image);
imageProcessing(signBitmap);
Utils.matToBitmap(image, bitmap);
//stopNumber = digitRecognition(bitmap).split("\n")[1].replace(" ", "");
stopNumber = digitRecognition(bitmap);
TextView tv = (TextView) findViewById(R.id.stopNumber);
tv.setText(stopNumber);
}
private void imageProcessing(Bitmap testBitmap) throws IOException {
Mat backProj = backProject();
Mat im1 = new Mat();
Mat im2 = new Mat();
backProj.copyTo(im1);
backProj.copyTo(im2);
backProj.convertTo(im1, CvType.CV_8U);
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(im1, contours, im2, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
double maxArea = 0;
int maxIdX = 0;
for (int i = 0; i < contours.size(); i++) {
double area = Imgproc.contourArea(contours.get(i));
maxIdX = area > maxArea ? i : maxIdX;
maxArea = area > maxArea ? area : maxArea;
}
im1.setTo(new Scalar(0));
Imgproc.drawContours(im1, contours, maxIdX, new Scalar(255), -1);
Imgproc.threshold(backProj, backProj, 10, 255, Imgproc.THRESH_BINARY);
//backProj.copyTo(image);
Core.absdiff(backProj, im1, image);
// Opening for close-up shots
//Imgproc.erode(image, image, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
//Imgproc.dilate(image, image, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2, 2)));
// Opening for loaded image
Imgproc.erode(image, image, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)));
Imgproc.dilate(image, image, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(15, 15)));
Utils.matToBitmap(image, testBitmap);
ImageView mImageView = (ImageView) findViewById(R.id.cameraResult);
mImageView.setImageBitmap(testBitmap); }
private Mat backProject() throws IOException {
Mat backProj = new Mat();
AssetManager am = getResources().getAssets();
//InputStream is = am.open("yellow.png");
//InputStream is = am.open("yellow.jpg");
//InputStream is = am.open("yellow b.jpg");
//InputStream is = am.open("yellow c.png");
//InputStream is = am.open("yellow d.png");
//InputStream is = am.open("yellow e.png");
InputStream is = am.open("greyellow.png");
Bitmap yellowBitmap = BitmapFactory.decodeStream(is);
yellow = new Mat(yellowBitmap.getWidth(), yellowBitmap.getHeight(), CvType.CV_8UC1);
Utils.bitmapToMat(yellowBitmap, yellow);
ArrayList<Mat> imageList = new ArrayList<Mat>();
Imgproc.cvtColor(image, backProj, Imgproc.COLOR_BGR2HSV);
imageList.add(backProj);
ArrayList<Mat> yellowList = new ArrayList<Mat>();
Imgproc.cvtColor(yellow, yellow, Imgproc.COLOR_BGR2HSV);
yellowList.add(yellow);
MatOfInt channels = new MatOfInt(0);
Mat hist= new Mat();
MatOfInt histSize = new MatOfInt(25);
MatOfFloat ranges = new MatOfFloat(0, 180);
Imgproc.calcHist(yellowList, channels, new Mat(), hist, histSize, ranges);
Imgproc.calcBackProject(imageList, channels, hist, backProj, ranges, 1);
return backProj;
}
private String digitRecognition(Bitmap bitmap) {
TessBaseAPI tess = new TessBaseAPI();
tess.init(DATA_PATH, lang);
tess.setVariable("tessedit_char_whitelist", "0123456789");
tess.setImage(bitmap);
String stopNumber = tess.getUTF8Text();
tess.end();
return stopNumber;
}
    /** Click handler for the back button: closes this activity. */
    public void goBack(View view) {
        finish();
    }
public void getRtpi(View view) throws Exception {
Intent intent = new Intent(getBaseContext(), ResultsActivity.class);
intent.putExtra("rtpi stop", stopNumber);
startActivity(intent);
}
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.image, menu);
        return true;
    }
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* A placeholder fragment containing a simple view.
*/
public static class PlaceholderFragment extends Fragment {
public PlaceholderFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_image,
container, false);
return rootView;
}
}
} | src/com/example/visionapp/ImageActivity.java | package com.example.visionapp;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import com.googlecode.tesseract.android.TessBaseAPI;
import android.app.Activity;
import android.app.Fragment;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
public class ImageActivity extends Activity {
Mat image, yellow, hist;
public static final String DATA_PATH = Environment.getExternalStorageDirectory().toString() + "/VisionBusApp/";
public static final String lang = "eng";
private static final String TAG = "ImageActivity.java";
private static String stopNumber;
@Override
protected void onCreate(Bundle savedInstanceState) {
    // BUG FIX: call through to the framework first. The original could hit the
    // early `return` below before super.onCreate() ran, which crashes the
    // Activity with a SuperNotCalledException.
    super.onCreate(savedInstanceState);
    // Make sure the working directory and the tessdata directory exist on the sdcard.
    String[] paths = new String[] {DATA_PATH, DATA_PATH + "tessdata/"};
    for (String path : paths) {
        File dir = new File(path);
        if (!dir.exists()) {
            if (!dir.mkdirs()) {
                Log.v(TAG, "ERROR: Creation of directory " + path + " on sdcard failed");
                return;
            } else {
                Log.v(TAG, "Created directory " + path + " on sdcard");
            }
        }
    }
    // Copy the Tesseract language file out of the APK assets on first run.
    if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata")).exists()) {
        InputStream is = null;
        OutputStream os = null;
        try {
            AssetManager am = getResources().getAssets();
            is = am.open("tessdata/" + lang + ".traineddata");
            // BUG FIX: the destination used "tessData/" (wrong case) while the
            // directory created above — and the existence check here — use
            // "tessdata/". On Android's case-sensitive filesystem the copy went
            // to a non-existent folder and Tesseract never found its data.
            os = new FileOutputStream(DATA_PATH + "tessdata/" + lang + ".traineddata");
            byte[] buf = new byte[1024];
            int len;
            while ((len = is.read(buf)) > 0) {
                os.write(buf, 0, len);
            }
            Log.v(TAG, "Copied " + lang + ".traineddata");
        } catch (IOException e) {
            Log.v(TAG, "Was unable to copy eng.traineddata " + e.toString());
        } finally {
            // Close both streams even when the copy fails part-way (the
            // original leaked them on any IOException).
            if (is != null) {
                try { is.close(); } catch (IOException ignored) { }
            }
            if (os != null) {
                try { os.close(); } catch (IOException ignored) { }
            }
        }
    }
    setContentView(R.layout.activity_image);
    if (savedInstanceState == null) {
        getFragmentManager().beginTransaction()
                .add(R.id.container, new PlaceholderFragment()).commit();
    }
}
/**
 * Re-runs the OCR pipeline each time the Activity returns to the foreground.
 * Any failure is only logged to stderr; there is no recovery path.
 */
@Override // was missing: this overrides Activity.onResume()
protected void onResume() {
    super.onResume();
    try {
        processImage();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Full OCR pipeline for the photo handed over in the launching Intent:
 * archives a timestamped PNG copy on the sdcard, shows the capture, runs the
 * OpenCV segmentation over the {@code image} field, feeds the result to
 * Tesseract and displays the recognised stop number.
 *
 * @throws Exception propagated from file I/O or the processing steps.
 */
public void processImage() throws Exception {
    Bitmap bitmap = null;
    Bundle extras = getIntent().getExtras();
    if (extras != null) {
        bitmap = (Bitmap) getIntent().getParcelableExtra("image");
    }
    if (bitmap == null) {
        // BUG FIX: without an "image" extra the original crashed with an NPE
        // on bitmap.copy() below; bail out instead.
        Log.v(TAG, "processImage: no image supplied in launching Intent");
        return;
    }
    // Archive the raw capture under a timestamped file name.
    Bitmap temp = bitmap.copy(bitmap.getConfig(), true);
    FileOutputStream out = null;
    String timeStamp = new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss").format(new java.util.Date());
    File file = new File(DATA_PATH + timeStamp + ".png");
    try {
        out = new FileOutputStream(file);
        temp.compress(Bitmap.CompressFormat.PNG, 100, out);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (out != null) {
            out.close();
        }
    }
    AssetManager asset = getResources().getAssets();
    InputStream in = asset.open("sign.jpg");
    //bitmap = BitmapFactory.decodeStream(in);
    // The stream was only needed by the commented-out decode above; close it
    // instead of leaking the asset handle (the original never closed it).
    in.close();
    Bitmap signBitmap = bitmap.copy(bitmap.getConfig(), true);
    ImageView mImageView = (ImageView) findViewById(R.id.cameraResult);
    mImageView.setImageBitmap(signBitmap);
    image = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC1);
    Utils.bitmapToMat(bitmap, image);
    imageProcessing(signBitmap);
    Utils.matToBitmap(image, bitmap);
    //stopNumber = digitRecognition(bitmap).split("\n")[1].replace(" ", "");
    stopNumber = digitRecognition(bitmap);
    TextView tv = (TextView) findViewById(R.id.stopNumber);
    tv.setText(stopNumber);
}
/**
 * Segments the sign region out of the {@code image} field in place and
 * mirrors the intermediate result into the preview ImageView.
 *
 * @param testBitmap scratch bitmap the processed Mat is rendered into for
 *        display; assumed to match the Mat's dimensions — TODO confirm.
 * @throws IOException if the reference swatch asset cannot be read.
 */
private void imageProcessing(Bitmap testBitmap) throws IOException {
    // Hue back-projection: bright where the pixel hue matches the yellow swatch.
    Mat backProj = backProject();
    Mat im1 = new Mat();
    Mat im2 = new Mat();
    backProj.copyTo(im1);
    backProj.copyTo(im2);
    // findContours mutates its input, so work on the 8-bit copy; im2 receives
    // the contour hierarchy output.
    backProj.convertTo(im1, CvType.CV_8U);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(im1, contours, im2, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
    // Find the largest external contour — presumably the sign itself.
    double maxArea = 0;
    int maxIdX = 0;
    for (int i = 0; i < contours.size(); i++) {
        double area = Imgproc.contourArea(contours.get(i));
        maxIdX = area > maxArea ? i : maxIdX;
        maxArea = area > maxArea ? area : maxArea;
    }
    // Filled mask of that largest contour.
    im1.setTo(new Scalar(0));
    Imgproc.drawContours(im1, contours, maxIdX, new Scalar(255), -1);
    // Binarise the back-projection, then diff it against the contour mask so
    // only the non-matching detail inside the sign (the digits, presumably)
    // survives into `image`.
    Imgproc.threshold(backProj, backProj, 10, 255, Imgproc.THRESH_BINARY);
    //backProj.copyTo(image);
    Core.absdiff(backProj, im1, image);
    // Opening for close-up shots
    Imgproc.erode(image, image, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
    Imgproc.dilate(image, image, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2, 2)));
    // Show the intermediate segmentation in the preview.
    Utils.matToBitmap(image, testBitmap);
    ImageView mImageView = (ImageView) findViewById(R.id.cameraResult);
    mImageView.setImageBitmap(testBitmap);
}
/**
 * Back-projects the hue histogram of a yellow reference swatch onto the
 * current {@code image}: output pixels score high where the image hue
 * matches the swatch.
 *
 * NOTE(review): the local {@code hist} below shadows the {@code hist} field
 * (which therefore stays unused), and the {@code yellow} field is
 * overwritten with the HSV swatch as a side effect.
 *
 * @return single-channel back-projection of {@code image}.
 * @throws IOException if the swatch asset cannot be opened.
 */
private Mat backProject() throws IOException {
    Mat backProj = new Mat();
    AssetManager am = getResources().getAssets();
    // Earlier reference swatches kept from experimentation.
    //InputStream is = am.open("yellow.png");
    //InputStream is = am.open("yellow.jpg");
    //InputStream is = am.open("yellow b.jpg");
    //InputStream is = am.open("yellow c.png");
    //InputStream is = am.open("yellow d.png");
    //InputStream is = am.open("yellow e.png");
    InputStream is = am.open("greyellow.png");
    Bitmap yellowBitmap = BitmapFactory.decodeStream(is);
    yellow = new Mat(yellowBitmap.getWidth(), yellowBitmap.getHeight(), CvType.CV_8UC1);
    Utils.bitmapToMat(yellowBitmap, yellow);
    ArrayList<Mat> imageList = new ArrayList<Mat>();
    // Image and swatch are both compared in HSV space.
    Imgproc.cvtColor(image, backProj, Imgproc.COLOR_BGR2HSV);
    imageList.add(backProj);
    //MatOfInt ch = new MatOfInt(0);
    //Core.mixChannels(imageList, imageList, ch);
    ArrayList<Mat> yellowList = new ArrayList<Mat>();
    Imgproc.cvtColor(yellow, yellow, Imgproc.COLOR_BGR2HSV);
    yellowList.add(yellow);
    //Core.mixChannels(yellowList, yellowList, ch);
    // Histogram over channel 0 (hue): 25 bins spanning the range [0, 180).
    MatOfInt channels = new MatOfInt(0);
    Mat hist= new Mat();
    MatOfInt histSize = new MatOfInt(25);
    MatOfFloat ranges = new MatOfFloat(0, 180);
    Imgproc.calcHist(yellowList, channels, new Mat(), hist, histSize, ranges);
    Imgproc.calcBackProject(imageList, channels, hist, backProj, ranges, 1);
    return backProj;
}
/**
 * Runs Tesseract over the given bitmap, restricted to the digits 0-9, and
 * returns the recognised text.
 */
private String digitRecognition(Bitmap bitmap) {
    final TessBaseAPI ocr = new TessBaseAPI();
    ocr.init(DATA_PATH, lang);
    // Only digits are valid stop numbers.
    ocr.setVariable("tessedit_char_whitelist", "0123456789");
    ocr.setImage(bitmap);
    final String recognised = ocr.getUTF8Text();
    ocr.end();
    return recognised;
}
/**
 * Closes this screen and returns to the caller. Public View callback —
 * presumably wired via android:onClick in the layout; confirm there.
 */
public void goBack(View view) {
    finish();
}
/**
 * Launches {@link ResultsActivity} with the recognised stop number under the
 * "rtpi stop" extra. Public View callback — presumably wired via
 * android:onClick; RTPI likely means real-time passenger information —
 * confirm against ResultsActivity.
 */
public void getRtpi(View view) throws Exception {
    Intent intent = new Intent(getBaseContext(), ResultsActivity.class);
    intent.putExtra("rtpi stop", stopNumber);
    startActivity(intent);
}
/** Inflates the action-bar menu for this screen. */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.image, menu);
    return true;
}
/**
 * Consumes the settings action-bar item; everything else (e.g. the Home/Up
 * button) is delegated to the framework, which handles it as long as a
 * parent activity is declared in AndroidManifest.xml.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    return item.getItemId() == R.id.action_settings || super.onOptionsItemSelected(item);
}
/**
 * A placeholder fragment containing a simple view.
 */
public static class PlaceholderFragment extends Fragment {

    public PlaceholderFragment() {
        // Required public no-arg constructor for framework re-instantiation.
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate without attaching; the FragmentManager attaches the view.
        return inflater.inflate(R.layout.fragment_image, container, false);
    }
}
} | (re-)added opening for loaded image
| src/com/example/visionapp/ImageActivity.java | (re-)added opening for loaded image |
|
Java | mit | ac29f735c3716d488c8e5770b105a0195f2e57d8 | 0 | amoz871/charplan | package com.daoc.charplan.ui.common;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.Color;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.view.View;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.daoc.charplan.R;
import com.daoc.charplan.util.Log;
/**
* To be used with {@link ViewPager} to provide a tab indicator component which give constant
* feedback as to the user's scroll progress.
*/
public class SlidingTabLayout extends HorizontalScrollView {
/**
* {@link SlidingTabStrip}.
*/
private final SlidingTabStrip mTabStrip;
/**
* Minimum width of tab item
*/
private final int mTabMinWidth;
/**
* Offset between tab titles.
*/
private final int mTitleOffset;
/**
* {@link ViewPager} to hold the Fragments.
*/
private ViewPager mViewPager;
/**
 * Constructor for SlidingTabLayout: configures the scroll view and installs
 * the internal {@link SlidingTabStrip}.
 */
public SlidingTabLayout(Context context, AttributeSet attrs) {
    super(context, attrs, 0);
    // Never show the horizontal scroll bar, and stretch the strip so it
    // always fills the viewport.
    setHorizontalScrollBarEnabled(false);
    setFillViewport(true);
    mTabMinWidth = (int) getResources().getDimension(R.dimen.tab_min_width);
    mTitleOffset = (int) getResources().getDimension(R.dimen.tab_title_offset);
    mTabStrip = new SlidingTabStrip(context);
    addView(mTabStrip, LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
}
/**
 * Inflates the view used for a single tab.
 *
 * @param context context used to inflate the layout.
 * @return the inflated tab container. Inflated with a null parent, so it
 *         carries no LayoutParams until the caller assigns some.
 */
protected LinearLayout createTabView(final Context context) {
    return (LinearLayout) View.inflate(context, R.layout.sliding_tab_item_view, null);
}
/**
 * Sets the associated view pager. Note that the assumption here is that the
 * pager content (number of tabs and tab titles) does not change after this
 * call has been made.
 *
 * @param viewPager which {@link ViewPager} to use.
 */
public void setViewPager(final ViewPager viewPager) {
    mTabStrip.removeAllViews();
    mViewPager = viewPager;
    setFillViewport(true);
    // Rebuild the tab strip whenever the adapter reports a data change.
    viewPager.getAdapter().registerDataSetObserver(new DataSetObserver() {
        @Override
        public void onChanged() {
            super.onChanged();
            populateTabStrip();
        }

        @Override
        public void onInvalidated() {
            super.onInvalidated();
            populateTabStrip();
        }
    });
    viewPager.addOnPageChangeListener(new InternalViewPagerListener());
    populateTabStrip();
}
/**
 * (Re)creates one tab view per adapter page, distributes them evenly and
 * wires up click handling plus the initial selection highlight.
 */
private void populateTabStrip() {
    final PagerAdapter adapter = mViewPager.getAdapter();
    final OnClickListener tabClickListener = new TabClickListener();
    for (int position = 0; position < adapter.getCount(); position++) {
        // Reuse an existing child when the strip already holds enough views.
        final LinearLayout tabView = mTabStrip.getChildCount() < adapter.getCount()
                ? createTabView(getContext())
                : (LinearLayout) mTabStrip.getChildAt(position);
        // Zero width plus equal weight distributes the tabs evenly.
        final LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(
                new LayoutParams(0, LayoutParams.WRAP_CONTENT));
        layoutParams.weight = 1;
        tabView.setLayoutParams(layoutParams);
        final TextView titleView = tabView.findViewById(android.R.id.text1);
        if (titleView == null) {
            continue;
        }
        titleView.setText(adapter.getPageTitle(position).toString());
        tabView.setOnClickListener(tabClickListener);
        if (tabView.getParent() == null) {
            mTabStrip.addView(tabView);
        }
        if (position == mViewPager.getCurrentItem()) {
            // Highlight the initially selected tab.
            titleView.setTextColor(ContextCompat.getColor(getContext(), R.color.colorAccent));
            tabView.setSelected(true);
        }
    }
}
/**
 * Sets the title of the tab at the given index.
 *
 * @param title The title to set.
 * @param index The index of the tab.
 */
public void setTitle(String title, int index) {
    final LinearLayout tabView = (LinearLayout) mTabStrip.getChildAt(index);
    if (tabView == null) {
        // Out-of-range index: getChildAt() returns null, which previously
        // caused an NPE here. Ignore the call instead.
        return;
    }
    final TextView titleView = tabView.findViewById(android.R.id.text1);
    if (titleView != null) {
        titleView.setText(title);
    }
}
/**
 * {@inheritDoc}
 */
@Override
protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    // Once attached, scroll so the currently selected tab is in view.
    if (mViewPager != null) {
        scrollToTab(mViewPager.getCurrentItem(), 0);
    }
}
/**
 * Scrolls the layout so the given tab is visible.
 *
 * @param tabIndex index of the tab to scroll to.
 * @param positionOffset pixel offset within the tab (used mid-swipe).
 */
private void scrollToTab(int tabIndex, int positionOffset) {
    if (tabIndex < 0 || tabIndex >= mTabStrip.getChildCount()) {
        return;
    }
    final View selectedChild = mTabStrip.getChildAt(tabIndex);
    if (selectedChild == null) {
        return;
    }
    int targetScrollX = selectedChild.getLeft() + positionOffset;
    if (tabIndex > 0 || positionOffset > 0) {
        // Not at the very start: keep part of the previous title visible.
        targetScrollX -= mTitleOffset;
    }
    scrollTo(targetScrollX, 0);
}
/**
 * Keeps the tab strip (indicator position, scroll offset, title colours)
 * in sync with the {@link ViewPager}'s scroll state.
 */
private class InternalViewPagerListener implements ViewPager.OnPageChangeListener {

    /** Last scroll state reported by the pager. */
    private int mScrollState;

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        if (position < 0 || position >= mTabStrip.getChildCount()) {
            return;
        }
        mTabStrip.onViewPagerPageChanged(position, positionOffset);
        final View selectedTitle = mTabStrip.getChildAt(position);
        final int extraOffset = selectedTitle == null
                ? 0 : (int) (positionOffset * selectedTitle.getWidth());
        scrollToTab(position, extraOffset);
    }

    @Override
    public void onPageScrollStateChanged(int state) {
        mScrollState = state;
    }

    @Override
    public void onPageSelected(int position) {
        if (mScrollState == ViewPager.SCROLL_STATE_IDLE) {
            mTabStrip.onViewPagerPageChanged(position, 0f);
            scrollToTab(position, 0);
        }
        // Re-tint every title: accent for the selected tab, white otherwise.
        for (int i = 0; i < mTabStrip.getChildCount(); i++) {
            final View tab = mTabStrip.getChildAt(i);
            tab.setSelected(i == position);
            final TextView title = (TextView) tab.findViewById(android.R.id.text1);
            title.setTextColor(i == position
                    ? ContextCompat.getColor(getContext(), R.color.colorAccent)
                    : Color.WHITE);
        }
    }
}
/**
 * Switches the pager to whichever tab was clicked.
 */
private class TabClickListener implements OnClickListener {

    @Override
    public void onClick(View view) {
        for (int i = 0, count = mTabStrip.getChildCount(); i < count; i++) {
            if (mTabStrip.getChildAt(i) == view) {
                mViewPager.setCurrentItem(i);
                return;
            }
        }
    }
}
} | app/src/main/java/com/daoc/charplan/ui/common/SlidingTabLayout.java | package com.daoc.charplan.ui.common;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.Color;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.view.View;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.daoc.charplan.R;
import com.daoc.charplan.util.Log;
/**
* To be used with {@link ViewPager} to provide a tab indicator component which give constant
* feedback as to the user's scroll progress.
*/
public class SlidingTabLayout extends HorizontalScrollView {
/**
* {@link SlidingTabStrip}.
*/
private final SlidingTabStrip mTabStrip;
/**
* Minimum width of tab item
*/
private final int mTabMinWidth;
/**
* Offset between tab titles.
*/
private final int mTitleOffset;
/**
* Used to depict if the tabs should be distributed evenly.
*/
private boolean mDistributeEvenly;
/**
* {@link ViewPager} to hold the Fragments.
*/
private ViewPager mViewPager;
/**
 * Constructor for SlidingTabLayout: configures the scroll view, enables
 * even tab distribution and installs the internal {@link SlidingTabStrip}.
 */
public SlidingTabLayout(Context context, AttributeSet attrs) {
    super(context, attrs, 0);
    mTabMinWidth = (int) getResources().getDimension(R.dimen.tab_min_width);
    // Disable the Scroll Bar
    setHorizontalScrollBarEnabled(false);
    // Make sure that the Tab Strips fills this View
    setFillViewport(true);
    // Tabs are always distributed evenly (flag is never cleared elsewhere).
    setDistributeEvenly();
    mTitleOffset = (int) getResources().getDimension(R.dimen.tab_title_offset);
    mTabStrip = new SlidingTabStrip(context);
    addView(mTabStrip, LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
}
/**
 * Enables even distribution of the tabs in the {@link LinearLayout}.
 * There is no way to turn the flag off again; it is latched to true.
 */
public void setDistributeEvenly() {
    mDistributeEvenly = true;
}
/**
 * Inflates the view used for a single tab.
 *
 * @param context context used to inflate the layout.
 * @return the inflated tab container. Inflated with a null parent, so it
 *         carries no LayoutParams until the caller assigns some.
 */
protected LinearLayout createTabView(final Context context) {
    return (LinearLayout) View.inflate(context, R.layout.sliding_tab_item_view, null);
}
/**
 * Sets the associated view pager. Note that the assumption here is that the
 * pager content (number of tabs and tab titles) does not change after this
 * call has been made.
 *
 * @param viewPager which {@link ViewPager} to use.
 */
public void setViewPager(final ViewPager viewPager) {
    mTabStrip.removeAllViews();
    mViewPager = viewPager;
    // Rebuild the tab strip whenever the adapter reports a data change.
    viewPager.getAdapter().registerDataSetObserver(new DataSetObserver() {
        @Override
        public void onChanged() {
            super.onChanged();
            populateTabStrip();
        }

        @Override
        public void onInvalidated() {
            super.onInvalidated();
            populateTabStrip();
        }
    });
    viewPager.addOnPageChangeListener(new InternalViewPagerListener());
    populateTabStrip();
}
/**
 * (Re)creates one tab view per adapter page, applies even distribution when
 * enabled and wires up click handling plus the initial selection highlight.
 */
private void populateTabStrip() {
    final PagerAdapter adapter = mViewPager.getAdapter();
    final OnClickListener tabClickListener = new TabClickListener();
    for (int index = 0; index < adapter.getCount(); index++) {
        final LinearLayout tabView;
        if (mTabStrip.getChildCount() < adapter.getCount()) {
            tabView = createTabView(getContext());
        } else {
            tabView = (LinearLayout) mTabStrip.getChildAt(index);
        }
        if (mDistributeEvenly) {
            LinearLayout.LayoutParams layoutParams
                    = (LinearLayout.LayoutParams) tabView.getLayoutParams();
            if (layoutParams == null) {
                // BUG FIX: a freshly inflated tab (View.inflate with a null
                // parent) has no LayoutParams yet, so the even-weight branch
                // was skipped and only "Layout params NULL!" was logged —
                // new tabs were never distributed evenly. Create the params
                // instead of bailing out.
                layoutParams = new LinearLayout.LayoutParams(0, LayoutParams.WRAP_CONTENT);
            }
            // Zero width plus equal weight distributes the tabs evenly.
            layoutParams.width = 0;
            layoutParams.weight = 1;
            tabView.setLayoutParams(layoutParams);
            Log.d("Layout params set to even weight");
        }
        final TextView titleView = (TextView) tabView.findViewById(android.R.id.text1);
        if (titleView != null) {
            titleView.setText(adapter.getPageTitle(index).toString());
            tabView.setOnClickListener(tabClickListener);
            if (tabView.getParent() == null) {
                mTabStrip.addView(tabView);
            }
            if (index == mViewPager.getCurrentItem()) {
                // Highlight the initially selected tab.
                titleView.setTextColor(ContextCompat.getColor(
                        getContext(), R.color.colorAccent));
                tabView.setSelected(true);
            }
        }
    }
}
/**
 * {@inheritDoc}
 */
@Override
protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    // Once attached, scroll so the currently selected tab is in view.
    if (mViewPager != null) {
        scrollToTab(mViewPager.getCurrentItem(), 0);
    }
}
/**
 * Scrolls the layout so the given tab is visible.
 *
 * @param tabIndex index of the tab to scroll to.
 * @param positionOffset pixel offset within the tab (used mid-swipe).
 */
private void scrollToTab(int tabIndex, int positionOffset) {
    if (tabIndex < 0 || tabIndex >= mTabStrip.getChildCount()) {
        return;
    }
    final View selectedChild = mTabStrip.getChildAt(tabIndex);
    if (selectedChild == null) {
        return;
    }
    int targetScrollX = selectedChild.getLeft() + positionOffset;
    if (tabIndex > 0 || positionOffset > 0) {
        // Not at the very start: keep part of the previous title visible.
        targetScrollX -= mTitleOffset;
    }
    scrollTo(targetScrollX, 0);
}
/**
 * Keeps the tab strip (indicator position, scroll offset, title colours)
 * in sync with the {@link ViewPager}'s scroll state.
 */
private class InternalViewPagerListener implements ViewPager.OnPageChangeListener {

    /** Last scroll state reported by the pager. */
    private int mScrollState;

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        if (position < 0 || position >= mTabStrip.getChildCount()) {
            return;
        }
        mTabStrip.onViewPagerPageChanged(position, positionOffset);
        final View selectedTitle = mTabStrip.getChildAt(position);
        final int extraOffset = selectedTitle == null
                ? 0 : (int) (positionOffset * selectedTitle.getWidth());
        scrollToTab(position, extraOffset);
    }

    @Override
    public void onPageScrollStateChanged(int state) {
        mScrollState = state;
    }

    @Override
    public void onPageSelected(int position) {
        if (mScrollState == ViewPager.SCROLL_STATE_IDLE) {
            mTabStrip.onViewPagerPageChanged(position, 0f);
            scrollToTab(position, 0);
        }
        // Re-tint every title: accent for the selected tab, white otherwise.
        for (int i = 0; i < mTabStrip.getChildCount(); i++) {
            final View tab = mTabStrip.getChildAt(i);
            tab.setSelected(i == position);
            final TextView title = (TextView) tab.findViewById(android.R.id.text1);
            title.setTextColor(i == position
                    ? ContextCompat.getColor(getContext(), R.color.colorAccent)
                    : Color.WHITE);
        }
    }
}
/**
 * Switches the pager to whichever tab was clicked.
 */
private class TabClickListener implements OnClickListener {

    @Override
    public void onClick(View view) {
        for (int i = 0, count = mTabStrip.getChildCount(); i < count; i++) {
            if (mTabStrip.getChildAt(i) == view) {
                mViewPager.setCurrentItem(i);
                return;
            }
        }
    }
}
} | Make SlidingTabLayour distribute evenly
| app/src/main/java/com/daoc/charplan/ui/common/SlidingTabLayout.java | Make SlidingTabLayour distribute evenly |
|
Java | mit | 08ef4cabd424d18fa2dfe6d765729cc9c4041c75 | 0 | martinstraus/wikimark,martinstraus/wikimark | /*
* The MIT License
*
* Copyright 2017 Martín Straus.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.wikimark;
import java.io.IOException;
import java.util.Optional;
import static java.util.stream.Collectors.toList;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.WebContext;
/**
*
* @author Martín Straus
*/
public class PageServlet extends javax.servlet.http.HttpServlet {

    /**
     * Extracts the page name from the request path: "/some-page" -> "some-page".
     */
    public static String pageName(HttpServletRequest request) {
        return request.getPathInfo().substring(1, request.getPathInfo().length());
    }

    /** Template engine used to render every view of this servlet. */
    private final TemplateEngine thymeleaf;
    /** Application context, used to build page URLs for redirects. */
    private final Context context;
    /** Page repository. */
    private final Pages pages;

    /**
     * Creates the servlet and registers it under the "/pages/*" mapping.
     */
    public PageServlet(ServletContext ctx, TemplateEngine thymeleaf, Context context, Pages pages) {
        this.thymeleaf = thymeleaf;
        this.context = context;
        this.pages = pages;
        ctx.addServlet(PageServlet.class.getName(), this).addMapping("/pages/*");
    }

    /**
     * Routes GET requests: no extra path -> full-text search; a path ending
     * in "/edit" -> edit form; anything else -> render that single page.
     */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        if (req.getPathInfo() == null) {
            search(req, resp);
        } else if (req.getPathInfo().endsWith("/edit")) {
            showEdit(req, resp);
        } else {
            showOne(req, resp);
        }
    }

    /**
     * Declares an HTML response encoded in UTF-8. setContentType(), unlike
     * setHeader("Content-Type", ...), also switches the charset used by
     * resp.getWriter(); with the bare header the writer kept the container
     * default (usually ISO-8859-1) while the header claimed utf-8, mangling
     * non-ASCII page content.
     */
    private static void prepareHtmlResponse(HttpServletResponse resp) {
        resp.setContentType("text/html; charset=utf-8");
    }

    /**
     * Renders the search results (at most 20 hits) for the "query" parameter.
     */
    private void search(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
        prepareHtmlResponse(resp);
        WebContext webContext = new WebContext(req, resp, req.getServletContext());
        webContext.setVariable(
            "pages",
            pages
                .findByTerms(req.getParameter("query"), 20)
                .stream()
                .map((page) -> page.pageContext())
                .collect(toList())
        );
        thymeleaf.process("/search-results", webContext, resp.getWriter());
    }

    /**
     * Renders a single page, or responds 404 when it does not exist.
     */
    private void showOne(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
        final Optional<Page> foundPage = new PageRequest(pages, req).page();
        if (foundPage.isPresent()) {
            prepareHtmlResponse(resp);
            WebContext webContext = new WebContext(req, resp, req.getServletContext());
            webContext.setVariable("page", foundPage.get().pageContext());
            thymeleaf.process("/page", webContext, resp.getWriter());
        } else {
            new Response(resp).notFound();
        }
    }

    /**
     * Renders the edit form for an existing page, or responds 404.
     */
    private void showEdit(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
        final Optional<Page> page = pageToEdit(req);
        if (!page.isPresent()) {
            new Response(resp).notFound();
            return;
        }
        prepareHtmlResponse(resp);
        WebContext webContext = new WebContext(req, resp, req.getServletContext());
        webContext.setVariable("page", page.get().pageContext());
        thymeleaf.process("/edit-page", webContext, resp.getWriter());
    }

    /**
     * Looks up the page addressed by a ".../edit" path.
     */
    private Optional<Page> pageToEdit(HttpServletRequest req) {
        return pages.find(req.getPathInfo().substring(0, req.getPathInfo().indexOf("/edit")));
    }

    /**
     * Handles POST, tunnelling PUT through the "_method" form parameter.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // Decode form bodies as UTF-8 before any parameter is read.
        req.setCharacterEncoding("UTF-8");
        switch (new Request(req).parameter("_method", "post")) {
            case "post":
                create(req, resp);
                break;
            case "put":
                edit(req, resp);
                break;
            default:
                new Response(resp).badRequest();
        }
    }

    /**
     * Creates a new page and redirects to it.
     */
    private void create(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        final Page page = new CreatePageForm(req).create(pages);
        new Response(resp).redirectTo(page.urlRelativeToHost(context));
    }

    /**
     * Edits an existing page and redirects to it; 404 when it does not exist.
     */
    private void edit(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        try {
            final Page page = new EditPageForm(req, pages).edit(resp);
            new Response(resp).redirectTo(page.urlRelativeToHost(context));
        } catch (IllegalArgumentException ex) {
            new Response(resp).notFound();
        }
    }
}
| src/main/java/org/wikimark/PageServlet.java | /*
* The MIT License
*
* Copyright 2017 Martín Straus.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.wikimark;
import java.io.IOException;
import java.util.Optional;
import static java.util.stream.Collectors.toList;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.WebContext;
/**
*
* @author Martín Straus
*/
public class PageServlet extends javax.servlet.http.HttpServlet {
public static String pageName(HttpServletRequest request) {
return request.getPathInfo().substring(1, request.getPathInfo().length());
}
private final TemplateEngine thymeleaf;
private final Context context;
private final Pages pages;
public PageServlet(ServletContext ctx, TemplateEngine thymeleaf, Context context, Pages pages) {
this.thymeleaf = thymeleaf;
this.context = context;
this.pages = pages;
ctx.addServlet(PageServlet.class.getName(), this).addMapping("/pages/*");
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
if (req.getPathInfo() == null) {
search(req, resp);
} else if (req.getPathInfo().endsWith("/edit")) {
showEdit(req, resp);
} else {
showOne(req, resp);
}
}
private void search(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
resp.setHeader("Content-Type", "text/html; charset=utf-8");
WebContext webContext = new WebContext(req, resp, req.getServletContext());
webContext.setVariable(
"pages",
pages
.findByTerms(req.getParameter("query"), 20)
.stream()
.map((page) -> page.pageContext())
.collect(toList())
);
thymeleaf.process("/search-results", webContext, resp.getWriter());
}
private void showOne(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
final Optional<Page> foundPage = new PageRequest(pages, req).page();
if (foundPage.isPresent()) {
resp.setHeader("Content-Type", "text/html; charset=utf-8");
final Page page = foundPage.get();
WebContext webContext = new WebContext(req, resp, req.getServletContext());
webContext.setVariable("page", page.pageContext());
thymeleaf.process("/page", webContext, resp.getWriter());
} else {
new Response(resp).notFound();
}
}
private void showEdit(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
final Optional<Page> page = pageToEdit(req);
if (!page.isPresent()) {
new Response(resp).notFound();
} else {
WebContext webContext = new WebContext(req, resp, req.getServletContext());
webContext.setVariable("page", page.get().pageContext());
thymeleaf.process("/edit-page", webContext, resp.getWriter());
}
}
private Optional<Page> pageToEdit(HttpServletRequest req) {
return pages.find(req.getPathInfo().substring(0, req.getPathInfo().indexOf("/edit")));
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
req.setCharacterEncoding("UTF-8");
switch (new Request(req).parameter("_method", "post")) {
case "post":
create(req, resp);
break;
case "put":
edit(req, resp);
break;
default:
new Response(resp).badRequest();
}
}
private void create(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
final Page page = new CreatePageForm(req).create(pages);
new Response(resp).redirectTo(page.urlRelativeToHost(context));
}
private void edit(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
try {
final Page page = new EditPageForm(req, pages).edit(resp);
new Response(resp).redirectTo(page.urlRelativeToHost(context));
} catch (IllegalArgumentException ex) {
new Response(resp).notFound();
}
}
}
| UTF-8 encoding setted before rendering the edit page template. | src/main/java/org/wikimark/PageServlet.java | UTF-8 encoding setted before rendering the edit page template. |
|
Java | mit | 626e57692539e7617bd33c560560be031435c5fe | 0 | ttwd80/qir,ttwd80/qir,ttwd80/qir | package integration.web;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertThat;
import org.junit.Test;
/**
 * Browser-driven login tests: each case logs in through the web UI and
 * checks which URL the application redirects to.
 */
public class WebLoginITCase extends AbstractWebITCase {

    /** Logs in with the given credentials and asserts the resulting URL. */
    private void assertLoginLandsOn(String username, String password, String expectedPath) {
        login(username, password);
        assertThat(webDriver.getCurrentUrl(), equalTo(baseUrl + expectedPath));
    }

    @Test
    public void loginAdminBad() {
        assertLoginLandsOn("admin", "bad", "/login?fail=true");
    }

    @Test
    public void loginAdminOk() {
        assertLoginLandsOn("admin", "cefew86traqe", "/admin/index");
    }

    @Test
    public void loginUserBad() {
        assertLoginLandsOn("user01", "bad", "/login?fail=true");
    }

    @Test
    public void loginUserOk() {
        assertLoginLandsOn("user01", "wruy7cran5tr01", "/user/index");
    }
}
| src/test/java/integration/web/WebLoginITCase.java | package integration.web;
import static org.hamcrest.core.IsEqual.*;
import static org.junit.Assert.*;
import org.junit.Test;
/**
 * Browser-driven login tests: each case logs in through the web UI and
 * checks which URL the application redirects to.
 */
public class WebLoginITCase extends AbstractWebITCase {

    /** Logs in with the given credentials and asserts the resulting URL. */
    private void assertLoginLandsOn(String username, String password, String expectedPath) {
        login(username, password);
        assertThat(webDriver.getCurrentUrl(), equalTo(baseUrl + expectedPath));
    }

    @Test
    public void loginAdminBad() {
        assertLoginLandsOn("admin", "bad", "/login?fail=true");
    }

    @Test
    public void loginAdminOk() {
        assertLoginLandsOn("admin", "password123!", "/admin/index");
    }

    @Test
    public void loginUserBad() {
        assertLoginLandsOn("user01", "bad", "/login?fail=true");
    }

    @Test
    public void loginUserOk() {
        assertLoginLandsOn("user01", "user01", "/user/index");
    }
}
| using new user/password info
| src/test/java/integration/web/WebLoginITCase.java | using new user/password info |
|
Java | mit | 7785616e92444cbf3b832cf7d73137f7f405588d | 0 | armandgray/SeeMe,armandgray/SeeMe,armandgray/SeeMe,armandgray/SeeMe | package com.armandgray.seeme.network;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.util.Log;
import android.widget.Toast;
import com.armandgray.seeme.services.HttpService;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import static com.armandgray.seeme.services.HttpService.JSON_BODY;
import static com.armandgray.seeme.services.HttpService.RESPONSE_TYPE;
/**
* Helper class for working with a remote server
*/
public class HttpHelper {
public static final String GET = "GET";
public static final String POST = "POST";
public static final String NOTES = "NOTES";
public static final String TAG = "HTTP_HELPER";
public static void sendPostRequest(String url, String body, Context context) {
if (!isNetworkOk(context)) { return; }
Intent intent = new Intent(context, HttpService.class);
intent.setData(Uri.parse(url));
intent.putExtra(JSON_BODY, body);
context.startService(intent);
}
public static void sendGetRequest(String url, Context context) {
if (!isNetworkOk(context)) { return; }
Intent intent = new Intent(context, HttpService.class);
intent.setData(Uri.parse(url));
context.startService(intent);
}
public static void sendGetRequest(String url, String responseType, Context context) {
if (!isNetworkOk(context)) { return; }
Intent intent = new Intent(context, HttpService.class);
intent.setData(Uri.parse(url));
intent.putExtra(RESPONSE_TYPE, responseType);
context.startService(intent);
}
private static boolean isNetworkOk(Context context) {
if (!NetworkHelper.hasNetworkAccess(context)) {
Toast.makeText(context, "Bad Network Connection", Toast.LENGTH_SHORT).show();
return false;
}
return true;
}
/**
* Returns text from a URL on a web server
* @return
* @throws IOException
*/
public static String downloadUrl(String address, String requestType, String body) throws IOException {
InputStream is = null;
try {
URL url = new URL(address);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setReadTimeout(10000);
conn.setConnectTimeout(15000);
conn.setRequestMethod(POST);
conn.setRequestProperty( "Content-Type", "application/json" );
conn.setRequestProperty("Accept", "application/json");
conn.setDoInput(true);
conn.connect();
Log.i(TAG, "downloadUrl()");
if (requestType.equals(POST)) {
Log.i(TAG, POST + ": " + body);
OutputStream os = conn.getOutputStream();
OutputStreamWriter writer = new OutputStreamWriter(os, "UTF-8");
writer.write(body);
writer.flush();
writer.close();
os.close();
}
int responseCode = conn.getResponseCode();
if (responseCode != 200) {
throw new IOException("Got response code " + responseCode);
}
is = conn.getInputStream();
return readStream(is);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (is != null) {
is.close();
}
}
return null;
}
/**
* Reads an InputStream and converts it to a String.
* @return
* @throws IOException
*/
private static String readStream(InputStream stream) throws IOException {
byte[] buffer = new byte[1024];
ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
BufferedOutputStream out = null;
try {
int length = 0;
out = new BufferedOutputStream(byteArray);
while ((length = stream.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
out.flush();
return byteArray.toString();
} catch (IOException e) {
e.printStackTrace();
return null;
} finally {
if (out != null) {
out.close();
}
}
}
}
| SeeMe/app/src/main/java/com/armandgray/seeme/network/HttpHelper.java | package com.armandgray.seeme.network;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.util.Log;
import android.widget.Toast;
import com.armandgray.seeme.services.HttpService;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import static com.armandgray.seeme.services.HttpService.JSON_BODY;
import static com.armandgray.seeme.services.HttpService.RESPONSE_TYPE;
/**
* Helper class for working with a remote server
*/
public class HttpHelper {
public static final String GET = "GET";
public static final String POST = "POST";
public static final String NOTES = "NOTES";
public static final String TAG = "HTTP_HELPER";
public static void sendPostRequest(String url, String body, Context context) {
Intent intent = new Intent(context, HttpService.class);
intent.setData(Uri.parse(url));
intent.putExtra(JSON_BODY, body);
context.startService(intent);
}
public static void sendGetRequest(String url, Context context) {
Intent intent = new Intent(context, HttpService.class);
intent.setData(Uri.parse(url));
context.startService(intent);
}
public static void sendGetRequest(String url, String responseType, Context context) {
Intent intent = new Intent(context, HttpService.class);
intent.setData(Uri.parse(url));
intent.putExtra(RESPONSE_TYPE, responseType);
context.startService(intent);
}
/**
* Returns text from a URL on a web server
* @return
* @throws IOException
*/
public static String downloadUrl(String address, String requestType, String body) throws IOException {
InputStream is = null;
try {
URL url = new URL(address);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setReadTimeout(10000);
conn.setConnectTimeout(15000);
conn.setRequestMethod(POST);
conn.setRequestProperty( "Content-Type", "application/json" );
conn.setRequestProperty("Accept", "application/json");
conn.setDoInput(true);
conn.connect();
Log.i(TAG, "downloadUrl()");
if (requestType.equals(POST)) {
Log.i(TAG, POST + ": " + body);
OutputStream os = conn.getOutputStream();
OutputStreamWriter writer = new OutputStreamWriter(os, "UTF-8");
writer.write(body);
writer.flush();
writer.close();
os.close();
}
int responseCode = conn.getResponseCode();
if (responseCode != 200) {
throw new IOException("Got response code " + responseCode);
}
is = conn.getInputStream();
return readStream(is);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (is != null) {
is.close();
}
}
return null;
}
/**
* Reads an InputStream and converts it to a String.
* @return
* @throws IOException
*/
private static String readStream(InputStream stream) throws IOException {
byte[] buffer = new byte[1024];
ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
BufferedOutputStream out = null;
try {
int length = 0;
out = new BufferedOutputStream(byteArray);
while ((length = stream.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
out.flush();
return byteArray.toString();
} catch (IOException e) {
e.printStackTrace();
return null;
} finally {
if (out != null) {
out.close();
}
}
}
private static boolean isNetworkOk(Context context) {
if (!NetworkHelper.hasNetworkAccess(context)) {
Toast.makeText(context, "Bad Network Connection", Toast.LENGTH_SHORT).show();
return false;
}
return true;
}
}
| added calls to isNetworkOk
| SeeMe/app/src/main/java/com/armandgray/seeme/network/HttpHelper.java | added calls to isNetworkOk |
|
Java | mit | bfd91370cd91b161f566e3ffd0257909e047d35b | 0 | apoi/reark,ulx/reark | package com.tehmou.rxbookapp.data.provider;
import android.net.Uri;
import com.tehmou.rxbookapp.data.base.contract.SerializedJsonContract;
import com.tehmou.rxbookapp.data.base.route.DatabaseRouteBase;
import java.util.List;
/**
* Created by ttuo on 04/05/15.
*/
public class NetworkRequestStatusSingleRoute extends DatabaseRouteBase {
private static final String MULTIPLE_MIME_TYPE =
"vnd.android.cursor.dir/vnd.tehmou.android.rxbookapp.networkrequeststatus";
public NetworkRequestStatusSingleRoute(final String tableName) {
super(tableName);
}
@Override
public String getMimeType() {
return MULTIPLE_MIME_TYPE;
}
@Override
public String getWhere(Uri uri) {
int uriHash = Integer.parseInt(uri.getLastPathSegment());
return NetworkRequestStatusContract.ID + " = " + uriHash;
}
public String getDefaultSortOrder() {
return SerializedJsonContract.ID + " ASC";
}
@Override
public String getPath() {
return tableName + "/*";
}
}
| app/src/main/java/com/tehmou/rxbookapp/data/provider/NetworkRequestStatusSingleRoute.java | package com.tehmou.rxbookapp.data.provider;
import android.net.Uri;
import com.tehmou.rxbookapp.data.base.contract.SerializedJsonContract;
import com.tehmou.rxbookapp.data.base.route.DatabaseRouteBase;
import java.util.List;
/**
* Created by ttuo on 04/05/15.
*/
public class NetworkRequestStatusSingleRoute extends DatabaseRouteBase {
private static final String MULTIPLE_MIME_TYPE =
"vnd.android.cursor.dir/vnd.tehmou.android.rxbookapp.networkrequeststatus";
public NetworkRequestStatusSingleRoute(final String tableName) {
super(tableName);
}
@Override
public String getMimeType() {
return MULTIPLE_MIME_TYPE;
}
@Override
public String getWhere(Uri uri) {
int uriHash = Integer.parseInt(uri.getLastPathSegment());
return NetworkRequestStatusContract.ID + " = " + uriHash;
}
public String getDefaultSortOrder() {
return SerializedJsonContract.ID + " ASC";
}
@Override
public String getPath() {
return tableName + "/#";
}
}
| Fix NetworkRequestStatusSingleRoute uri matcher
| app/src/main/java/com/tehmou/rxbookapp/data/provider/NetworkRequestStatusSingleRoute.java | Fix NetworkRequestStatusSingleRoute uri matcher |
|
Java | epl-1.0 | 5a7960309debf6dbd3820f6801118619e13e3606 | 0 | rohitmohan96/ceylon-ide-eclipse,rohitmohan96/ceylon-ide-eclipse | package com.redhat.ceylon.eclipse.imp.wizard;
import static org.eclipse.jdt.core.IJavaElement.PACKAGE_FRAGMENT_ROOT;
import static org.eclipse.jdt.internal.ui.refactoring.nls.SourceContainerDialog.getSourceContainer;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.Status;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.internal.ui.wizards.NewSourceFolderCreationWizard;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.wizards.IWizardDescriptor;
import com.redhat.ceylon.eclipse.ui.CeylonPlugin;
public class NewUnitWizardPage extends WizardPage implements IWizardPage {
private String unitName;
private IPackageFragmentRoot sourceDir;
private IPackageFragment packageFragment;
private String packageName = "";
private boolean includePreamble = true;
private boolean shared = true;
private boolean declaration;
private final boolean declarationButtonDisabled;
private IStructuredSelection selection;
private IWorkbench workbench;
NewUnitWizardPage(String title, String description,
String defaultUnitName, String icon,
boolean declarationButtonDisabled) {
super(title, title, CeylonPlugin.getInstance()
.getImageRegistry().getDescriptor(icon));
setDescription(description);
unitName = defaultUnitName;
this.declarationButtonDisabled = declarationButtonDisabled;
declaration = declarationButtonDisabled;
}
//TODO: fix copy/paste to ExportModuleWizard
private IJavaElement getSelectedElement() {
if (selection!=null && selection.size()==1) {
Object element = selection.getFirstElement();
if (element instanceof IFile) {
return JavaCore.create(((IFile) element).getParent());
}
else {
return (IJavaElement) ((IAdaptable) element)
.getAdapter(IJavaElement.class);
}
}
else {
return null;
}
}
@Override
public void createControl(Composite parent) {
initializeDialogUnits(parent);
initFromSelection();
Composite composite= new Composite(parent, SWT.NONE);
composite.setFont(parent.getFont());
GridLayout layout = new GridLayout();
layout.numColumns = 4;
composite.setLayout(layout);
createControls(composite);
setControl(composite);
Dialog.applyDialogFont(composite);
setPageComplete(isComplete());
}
void createControls(Composite composite) {
Text name = createNameField(composite);
createDeclarationField(composite);
createSeparator(composite);
Text folder = createFolderField(composite);
createPackageField(composite, folder);
name.forceFocus();
}
void createSeparator(Composite composite) {
Label sep = new Label(composite, SWT.SEPARATOR | SWT.HORIZONTAL);
GridData sgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
sgd.horizontalSpan = 4;
sep.setLayoutData(sgd);
}
Text createNameField(Composite composite) {
Label nameLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
nameLabel.setText(getCompilationUnitLabel());
GridData lgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
lgd.horizontalSpan = 1;
nameLabel.setLayoutData(lgd);
final Text name = new Text(composite, SWT.SINGLE | SWT.BORDER);
GridData ngd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
ngd.horizontalSpan = 2;
ngd.grabExcessHorizontalSpace = true;
name.setLayoutData(ngd);
name.setText(unitName);
name.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
unitName = name.getText();
if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else {
setErrorMessage(null);
}
setPageComplete(isComplete());
}
});
new Label(composite, SWT.NONE);
new Label(composite, SWT.NONE);
Button includeHeader = new Button(composite, SWT.CHECK);
includeHeader.setText("Include preamble in 'header.ceylon' in project root");
includeHeader.setSelection(includePreamble);
GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
igd.horizontalSpan = 3;
igd.grabExcessHorizontalSpace = true;
includeHeader.setLayoutData(igd);
includeHeader.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
includePreamble = !includePreamble;
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
new Label(composite, SWT.NONE);
Link link = new Link(composite, SWT.NONE);
link.setText("<a>Edit 'header.ceylon'...</a>");
GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
kgd.horizontalSpan = 2;
kgd.grabExcessHorizontalSpace = true;
link.setLayoutData(kgd);
link.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
if (sourceDir==null) {
MessageDialog.openWarning(getShell(), "No Source Folder",
getSelectSourceFolderMessage());
}
else {
EditDialog d = new EditDialog(getShell());
d.setText(readHeader());
if (d.open()==Status.OK) {
saveHeader(d.getText());
}
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
new Label(composite, SWT.NONE);
return name;
}
String getCompilationUnitLabel() {
return "Compilation unit name: ";
}
Text createFolderField(Composite composite) {
Label folderLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
folderLabel.setText("Source folder: ");
GridData flgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
flgd.horizontalSpan = 1;
folderLabel.setLayoutData(flgd);
final Text folder = new Text(composite, SWT.SINGLE | SWT.BORDER);
GridData fgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
fgd.horizontalSpan = 2;
fgd.grabExcessHorizontalSpace = true;
folder.setLayoutData(fgd);
if (sourceDir!=null) {
String folderName = sourceDir.getPath().toPortableString();
folder.setText(folderName);
}
folder.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
setSourceDir(folder.getText());
if (sourceDir!=null && packageNameIsLegal()) {
packageFragment = sourceDir.getPackageFragment(packageName);
}
if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
setPageComplete(isComplete());
}
private void setSourceDir(String folderName) {
try {
sourceDir = null;
for (IJavaProject jp: JavaCore.create(ResourcesPlugin.getWorkspace().getRoot())
.getJavaProjects()) {
for (IPackageFragmentRoot pfr: jp.getPackageFragmentRoots()) {
if (pfr.getPath().toPortableString().equals(folderName)) {
sourceDir = pfr;
return;
}
}
}
}
catch (JavaModelException jme) {
jme.printStackTrace();
}
}
});
Button selectFolder = new Button(composite, SWT.PUSH);
selectFolder.setText("Browse...");
GridData sfgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
sfgd.horizontalSpan = 1;
selectFolder.setLayoutData(sfgd);
selectFolder.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
IPackageFragmentRoot pfr = getSourceContainer(getShell(),
ResourcesPlugin.getWorkspace().getRoot(), sourceDir);
if (pfr!=null) {
sourceDir = pfr;
String folderName = sourceDir.getPath().toPortableString();
folder.setText(folderName);
packageFragment = sourceDir.getPackageFragment(packageName);
setPageComplete(isComplete());
}
if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
new Label(composite, SWT.NONE);
Link link = new Link(composite, SWT.NONE);
link.setText("<a>Create new source folder...</a>");
GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
kgd.horizontalSpan = 3;
kgd.grabExcessHorizontalSpace = true;
link.setLayoutData(kgd);
link.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
IPackageFragmentRoot pfr = (IPackageFragmentRoot) openSourceFolderWizard();
if (pfr!=null) {
sourceDir = pfr;
String folderName = sourceDir.getPath().toPortableString();
folder.setText(folderName);
packageFragment = sourceDir.getPackageFragment(packageName);
setPageComplete(isComplete());
}
if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
return folder;
}
Text createPackageField(Composite composite, final Text folder) {
final Text pkg = createPackageField(composite);
new Label(composite, SWT.NONE);
Link link = new Link(composite, SWT.NONE);
link.setText("<a>Create new Ceylon package with descriptor...</a>");
GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
kgd.horizontalSpan = 3;
kgd.grabExcessHorizontalSpace = true;
link.setLayoutData(kgd);
link.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
NewPackageWizard wiz = openPackageWizard();
IPackageFragment pfr = wiz.getPackageFragment();
if (pfr!=null) {
sourceDir = wiz.getSourceFolder();
String folderName = sourceDir.getPath().toPortableString();
folder.setText(folderName);
pkg.setText(pfr.getElementName());
packageFragment = pfr;
setPageComplete(isComplete());
}
if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
return pkg;
}
Text createPackageField(Composite composite) {
Label packageLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
packageLabel.setText(getPackageLabel());
GridData plgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
plgd.horizontalSpan = 1;
packageLabel.setLayoutData(plgd);
final Text pkg = new Text(composite, SWT.SINGLE | SWT.BORDER);
GridData pgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
pgd.horizontalSpan = 2;
pgd.grabExcessHorizontalSpace = true;
pkg.setLayoutData(pgd);
pkg.setText(packageName);
pkg.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
packageName = pkg.getText();
if (sourceDir!=null && packageNameIsLegal()) {
packageFragment = sourceDir.getPackageFragment(packageName);
}
if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
setPageComplete(isComplete());
}
});
/*if (packageFragment!=null) {
String pkgName = packageFragment.getElementName();
pkg.setText(pkgName);
}*/
Button selectPackage = new Button(composite, SWT.PUSH);
selectPackage.setText("Browse...");
GridData spgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
spgd.horizontalSpan = 1;
selectPackage.setLayoutData(spgd);
selectPackage.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
if (sourceDir==null) {
MessageDialog.openWarning(getShell(), "No Source Folder",
getSelectSourceFolderMessage());
}
else {
PackageSelectionDialog dialog = new PackageSelectionDialog(getShell(), sourceDir);
dialog.setMultipleSelection(false);
dialog.setTitle("Package Selection");
dialog.setMessage("Select a package:");
dialog.open();
Object result = dialog.getFirstResult();
if (result!=null) {
packageName = ((IPackageFragment) result).getElementName();
pkg.setText(packageName);
if (sourceDir!=null) {
packageFragment = sourceDir.getPackageFragment(packageName);
}
setPageComplete(isComplete());
}
if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
return pkg;
}
private NewPackageWizard openPackageWizard() {
IWizardDescriptor descriptor = PlatformUI.getWorkbench().getNewWizardRegistry()
.findWizard("com.redhat.ceylon.eclipse.ui.newPackageWizard");
if (descriptor!=null) {
try {
NewPackageWizard wizard = (NewPackageWizard) descriptor.createWizard();
wizard.init(workbench, selection);
WizardDialog wd = new WizardDialog(Display.getCurrent().getActiveShell(),
wizard);
wd.setTitle(wizard.getWindowTitle());
wd.open();
return wizard;
}
catch (CoreException e) {
e.printStackTrace();
}
}
return null;
}
private IJavaElement openSourceFolderWizard() {
IWizardDescriptor descriptor = PlatformUI.getWorkbench().getNewWizardRegistry()
.findWizard("org.eclipse.jdt.ui.wizards.NewSourceFolderCreationWizard");
if (descriptor!=null) {
try {
NewSourceFolderCreationWizard wizard = (NewSourceFolderCreationWizard) descriptor.createWizard();
wizard.init(workbench, selection);
WizardDialog wd = new WizardDialog(Display.getCurrent().getActiveShell(),
wizard);
wd.setTitle(wizard.getWindowTitle());
wd.open();
return wizard.getCreatedElement();
}
catch (CoreException e) {
e.printStackTrace();
}
}
return null;
}
void createSharedField(Composite composite) {
new Label(composite, SWT.NONE);
Button sharedPackage = new Button(composite, SWT.CHECK);
sharedPackage.setText(getSharedPackageLabel());
sharedPackage.setSelection(shared);
GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
igd.horizontalSpan = 3;
igd.grabExcessHorizontalSpace = true;
sharedPackage.setLayoutData(igd);
sharedPackage.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
shared = !shared;
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
}
void createDeclarationField(Composite composite) {
new Label(composite, SWT.NONE);
Button dec = new Button(composite, SWT.CHECK);
dec.setText("Create toplevel class or method declaration");
dec.setSelection(declaration);
dec.setEnabled(!declarationButtonDisabled);
GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
igd.horizontalSpan = 3;
igd.grabExcessHorizontalSpace = true;
dec.setLayoutData(igd);
dec.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
declaration = !declaration;
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
}
String getSharedPackageLabel() {
return "Create shared package (visible to other modules)";
}
String getPackageLabel() {
return "Package: ";
}
public void initFromSelection() {
IJavaElement je = getSelectedElement();
if (je instanceof IPackageFragmentRoot) {
sourceDir = (IPackageFragmentRoot) je;
packageFragment = sourceDir.getPackageFragment("");
packageName = packageFragment.getElementName();
}
else if (je instanceof IPackageFragment) {
packageFragment = (IPackageFragment) je;
packageName = packageFragment.getElementName();
sourceDir = (IPackageFragmentRoot) packageFragment.getAncestor(PACKAGE_FRAGMENT_ROOT);
}
}
public void init(IWorkbench workbench, IStructuredSelection selection) {
this.selection = selection;
this.workbench = workbench;
}
boolean isComplete() {
return packageNameIsLegal() && unitIsNameLegal() &&
sourceDir!=null &&
sourceDir.getPackageFragment(packageFragment.getElementName())
.equals(packageFragment);
}
public IPackageFragment getPackageFragment() {
return packageFragment;
}
public IPackageFragmentRoot getSourceDir() {
return sourceDir;
}
public String getUnitName() {
return unitName;
}
public boolean isIncludePreamble() {
return includePreamble;
}
public boolean isShared() {
return shared;
}
public boolean isDeclaration() {
return declaration;
}
private String readHeader() {
//TODO: use IRunnableWithProgress
StringBuilder sb = new StringBuilder();
IFile file = getHeaderFile();
if (file.exists() && file.isAccessible()) {
InputStream stream = null;
try {
stream = file.getContents();
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
String line;
while ((line = reader.readLine())!=null) {
sb.append(line).append("\n");
}
}
catch (Exception ex) {
ex.printStackTrace();
}
finally {
try {
if (stream!=null) stream.close();
}
catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
return sb.toString();
}
private void saveHeader(String header) {
//TODO: use IRunnableWithProgress
IFile file = getHeaderFile();
ByteArrayInputStream stream = null;
try {
if (file.exists()) {
file.delete(true, null);
}
stream = new ByteArrayInputStream(header.getBytes()); //TODO: encoding
file.create(stream, true, null);
}
catch (CoreException e) {
e.printStackTrace();
}
finally {
try {
if (stream!=null) stream.close();
}
catch (IOException e) {
e.printStackTrace();
}
}
}
private IFile getHeaderFile() {
return sourceDir.getJavaProject().getProject()
.getFile("header.ceylon");
}
private boolean unitIsNameLegal() {
return unitName!=null &&
unitIsNameLegal(unitName);
}
boolean unitIsNameLegal(String unitName) {
return unitName.matches("\\w+");
}
private String getIllegalUnitNameMessage() {
return "Please enter a legal compilation unit name.";
}
private String getSelectSourceFolderMessage() {
return "Please select a source folder";
}
boolean packageNameIsLegal(String packageName) {
return packageName.isEmpty() ||
packageName.matches("^[a-z_]\\w*(\\.[a-z_]\\w*)*$");
}
private boolean packageNameIsLegal() {
return packageName!=null &&
packageNameIsLegal(packageName);
}
String getIllegalPackageNameMessage() {
return "Please enter a legal package name.";
}
}
| plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/imp/wizard/NewUnitWizardPage.java | package com.redhat.ceylon.eclipse.imp.wizard;
import static org.eclipse.jdt.core.IJavaElement.PACKAGE_FRAGMENT_ROOT;
import static org.eclipse.jdt.internal.ui.refactoring.nls.SourceContainerDialog.getSourceContainer;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.Status;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.internal.ui.wizards.NewSourceFolderCreationWizard;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.wizards.IWizardDescriptor;
import com.redhat.ceylon.eclipse.ui.CeylonPlugin;
public class NewUnitWizardPage extends WizardPage implements IWizardPage {
private String unitName;
private IPackageFragmentRoot sourceDir;
private IPackageFragment packageFragment;
private String packageName = "";
private boolean includePreamble = true;
private boolean shared = true;
private boolean declaration;
private final boolean declarationButtonDisabled;
private IStructuredSelection selection;
private IWorkbench workbench;
NewUnitWizardPage(String title, String description,
String defaultUnitName, String icon,
boolean declarationButtonDisabled) {
super(title, title, CeylonPlugin.getInstance()
.getImageRegistry().getDescriptor(icon));
setDescription(description);
unitName = defaultUnitName;
this.declarationButtonDisabled = declarationButtonDisabled;
declaration = declarationButtonDisabled;
}
//TODO: fix copy/paste to ExportModuleWizard
private IJavaElement getSelectedElement() {
if (selection!=null && selection.size()==1) {
Object element = selection.getFirstElement();
if (element instanceof IFile) {
return JavaCore.create(((IFile) element).getParent());
}
else {
return (IJavaElement) ((IAdaptable) element)
.getAdapter(IJavaElement.class);
}
}
else {
return null;
}
}
@Override
public void createControl(Composite parent) {
initializeDialogUnits(parent);
initFromSelection();
Composite composite= new Composite(parent, SWT.NONE);
composite.setFont(parent.getFont());
GridLayout layout = new GridLayout();
layout.numColumns = 4;
composite.setLayout(layout);
createControls(composite);
setControl(composite);
Dialog.applyDialogFont(composite);
setPageComplete(isComplete());
}
    /**
     * Populates the grid: name field, declaration checkbox, separator, then
     * folder and package fields. Focus starts in the name field.
     */
    void createControls(Composite composite) {
        Text name = createNameField(composite);
        createDeclarationField(composite);
        createSeparator(composite);
        Text folder = createFolderField(composite);
        createPackageField(composite, folder);
        name.forceFocus();
    }

    /** Adds a horizontal rule spanning all four grid columns. */
    void createSeparator(Composite composite) {
        Label sep = new Label(composite, SWT.SEPARATOR | SWT.HORIZONTAL);
        GridData sgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        sgd.horizontalSpan = 4;
        sep.setLayoutData(sgd);
    }
    /**
     * Builds the unit name text field, the "include preamble" checkbox, and
     * the 'Edit header.ceylon' link. Edits to the name revalidate the page.
     *
     * @return the name text field, so the caller can give it focus
     */
    Text createNameField(Composite composite) {
        Label nameLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
        nameLabel.setText(getCompilationUnitLabel());
        GridData lgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        lgd.horizontalSpan = 1;
        nameLabel.setLayoutData(lgd);
        final Text name = new Text(composite, SWT.SINGLE | SWT.BORDER);
        GridData ngd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        ngd.horizontalSpan = 2;
        ngd.grabExcessHorizontalSpace = true;
        name.setLayoutData(ngd);
        name.setText(unitName);
        name.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                unitName = name.getText();
                // Report the first failing validation, in priority order
                if (!unitIsNameLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
                setPageComplete(isComplete());
            }
        });
        new Label(composite, SWT.NONE);
        new Label(composite, SWT.NONE);
        Button includeHeader = new Button(composite, SWT.CHECK);
        includeHeader.setText("Include preamble in 'header.ceylon' in project root");
        includeHeader.setSelection(includePreamble);
        GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        igd.horizontalSpan = 3;
        igd.grabExcessHorizontalSpace = true;
        includeHeader.setLayoutData(igd);
        includeHeader.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                includePreamble = !includePreamble;
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        new Label(composite, SWT.NONE);
        Link link = new Link(composite, SWT.NONE);
        link.setText("<a>Edit 'header.ceylon'...</a>");
        GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        kgd.horizontalSpan = 2;
        kgd.grabExcessHorizontalSpace = true;
        link.setLayoutData(kgd);
        link.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                // Editing the header requires a source folder, because the
                // header file lives in that folder's project root
                if (sourceDir==null) {
                    MessageDialog.openWarning(getShell(), "No Source Folder",
                            getSelectSourceFolderMessage());
                }
                else {
                    EditDialog d = new EditDialog(getShell());
                    d.setText(readHeader());
                    if (d.open()==Status.OK) {
                        saveHeader(d.getText());
                    }
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        new Label(composite, SWT.NONE);
        return name;
    }

    /** Label for the unit name field; subclasses may override. */
    String getCompilationUnitLabel() {
        return "Compilation unit name: ";
    }
    /**
     * Builds the source folder field with its Browse button and a link that
     * launches the JDT new-source-folder wizard. Typing a path resolves it
     * against the package fragment roots of every Java project in the
     * workspace.
     *
     * @return the folder text field, shared with the package field
     */
    Text createFolderField(Composite composite) {
        Label folderLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
        folderLabel.setText("Source folder: ");
        GridData flgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        flgd.horizontalSpan = 1;
        folderLabel.setLayoutData(flgd);
        final Text folder = new Text(composite, SWT.SINGLE | SWT.BORDER);
        GridData fgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        fgd.horizontalSpan = 2;
        fgd.grabExcessHorizontalSpace = true;
        folder.setLayoutData(fgd);
        if (sourceDir!=null) {
            String folderName = sourceDir.getPath().toPortableString();
            folder.setText(folderName);
        }
        folder.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                String folderName = folder.getText();
                try {
                    // Re-resolve the typed path against every source folder
                    // in the workspace; null when no root matches
                    sourceDir = null;
                    for (IJavaProject jp: JavaCore.create(ResourcesPlugin.getWorkspace().getRoot())
                            .getJavaProjects()) {
                        for (IPackageFragmentRoot pfr: jp.getPackageFragmentRoots()) {
                            if (pfr.getPath().toPortableString().equals(folderName)) {
                                sourceDir = pfr;
                            }
                        }
                    }
                }
                catch (JavaModelException jme) {
                    jme.printStackTrace();
                }
                if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (!unitIsNameLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
                setPageComplete(isComplete());
            }
        });
        Button selectFolder = new Button(composite, SWT.PUSH);
        selectFolder.setText("Browse...");
        GridData sfgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        sfgd.horizontalSpan = 1;
        selectFolder.setLayoutData(sfgd);
        selectFolder.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                IPackageFragmentRoot pfr = getSourceContainer(getShell(),
                        ResourcesPlugin.getWorkspace().getRoot(), sourceDir);
                if (pfr!=null) {
                    sourceDir = pfr;
                    String folderName = sourceDir.getPath().toPortableString();
                    folder.setText(folderName);
                    packageFragment = sourceDir.getPackageFragment(packageName);
                    setPageComplete(isComplete());
                }
                if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (!unitIsNameLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        new Label(composite, SWT.NONE);
        Link link = new Link(composite, SWT.NONE);
        link.setText("<a>Create new source folder...</a>");
        GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        kgd.horizontalSpan = 3;
        kgd.grabExcessHorizontalSpace = true;
        link.setLayoutData(kgd);
        link.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                IPackageFragmentRoot pfr = (IPackageFragmentRoot) openSourceFolderWizard();
                if (pfr!=null) {
                    sourceDir = pfr;
                    String folderName = sourceDir.getPath().toPortableString();
                    folder.setText(folderName);
                    packageFragment = sourceDir.getPackageFragment(packageName);
                    setPageComplete(isComplete());
                }
                if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (!unitIsNameLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        return folder;
    }
Text createPackageField(Composite composite, final Text folder) {
final Text pkg = createPackageField(composite);
new Label(composite, SWT.NONE);
Link link = new Link(composite, SWT.NONE);
link.setText("<a>Create new Ceylon package with descriptor...</a>");
GridData kgd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);
kgd.horizontalSpan = 3;
kgd.grabExcessHorizontalSpace = true;
link.setLayoutData(kgd);
link.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
NewPackageWizard wiz = openPackageWizard();
IPackageFragment pfr = wiz.getPackageFragment();
if (pfr!=null) {
sourceDir = wiz.getSourceFolder();
String folderName = sourceDir.getPath().toPortableString();
folder.setText(folderName);
pkg.setText(pfr.getElementName());
packageFragment = pfr;
setPageComplete(isComplete());
}
if (!packageNameIsLegal()) {
setErrorMessage(getIllegalPackageNameMessage());
}
else if (sourceDir==null) {
setErrorMessage(getSelectSourceFolderMessage());
}
else if (!unitIsNameLegal()) {
setErrorMessage(getIllegalUnitNameMessage());
}
else {
setErrorMessage(null);
}
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
return pkg;
}
    /**
     * Builds the package name field with its Browse button. Edits revalidate
     * the page and re-resolve the package fragment within the source folder.
     *
     * @return the package text field
     */
    Text createPackageField(Composite composite) {
        Label packageLabel = new Label(composite, SWT.LEFT | SWT.WRAP);
        packageLabel.setText(getPackageLabel());
        GridData plgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        plgd.horizontalSpan = 1;
        packageLabel.setLayoutData(plgd);
        final Text pkg = new Text(composite, SWT.SINGLE | SWT.BORDER);
        GridData pgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        pgd.horizontalSpan = 2;
        pgd.grabExcessHorizontalSpace = true;
        pkg.setLayoutData(pgd);
        pkg.setText(packageName);
        pkg.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                packageName = pkg.getText();
                if (!packageNameIsLegal()) {
                    setErrorMessage(getIllegalPackageNameMessage());
                }
                else if (sourceDir==null) {
                    setErrorMessage(getSelectSourceFolderMessage());
                }
                else if (!unitIsNameLegal()) {
                    setErrorMessage(getIllegalUnitNameMessage());
                }
                else {
                    setErrorMessage(null);
                }
                if (sourceDir!=null) {
                    packageFragment = sourceDir.getPackageFragment(packageName);
                }
                setPageComplete(isComplete());
            }
        });
        /*if (packageFragment!=null) {
            String pkgName = packageFragment.getElementName();
            pkg.setText(pkgName);
        }*/
        Button selectPackage = new Button(composite, SWT.PUSH);
        selectPackage.setText("Browse...");
        GridData spgd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        spgd.horizontalSpan = 1;
        selectPackage.setLayoutData(spgd);
        selectPackage.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (sourceDir==null) {
                    MessageDialog.openWarning(getShell(), "No Source Folder",
                            getSelectSourceFolderMessage());
                }
                else {
                    PackageSelectionDialog dialog = new PackageSelectionDialog(getShell(), sourceDir);
                    dialog.setMultipleSelection(false);
                    dialog.setTitle("Package Selection");
                    dialog.setMessage("Select a package:");
                    dialog.open();
                    Object result = dialog.getFirstResult();
                    if (result!=null) {
                        packageName = ((IPackageFragment) result).getElementName();
                        pkg.setText(packageName);
                        setPageComplete(isComplete());
                    }
                    if (!packageNameIsLegal()) {
                        setErrorMessage(getIllegalPackageNameMessage());
                    }
                    else if (sourceDir==null) {
                        setErrorMessage(getSelectSourceFolderMessage());
                    }
                    else if (!unitIsNameLegal()) {
                        setErrorMessage(getIllegalUnitNameMessage());
                    }
                    else {
                        setErrorMessage(null);
                    }
                }
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
        return pkg;
    }
    /**
     * Opens the Ceylon new-package wizard as a modal dialog.
     *
     * @return the wizard after the dialog closes, or null when the wizard
     *         descriptor is missing or wizard creation fails
     */
    private NewPackageWizard openPackageWizard() {
        IWizardDescriptor descriptor = PlatformUI.getWorkbench().getNewWizardRegistry()
                .findWizard("com.redhat.ceylon.eclipse.ui.newPackageWizard");
        if (descriptor!=null) {
            try {
                NewPackageWizard wizard = (NewPackageWizard) descriptor.createWizard();
                wizard.init(workbench, selection);
                WizardDialog wd = new WizardDialog(Display.getCurrent().getActiveShell(),
                        wizard);
                wd.setTitle(wizard.getWindowTitle());
                wd.open();
                return wizard;
            }
            catch (CoreException e) {
                e.printStackTrace();
            }
        }
        return null;
    }

    /**
     * Opens the JDT new-source-folder wizard as a modal dialog.
     *
     * @return the created source folder element, or null when nothing was
     *         created or the wizard could not be opened
     */
    private IJavaElement openSourceFolderWizard() {
        IWizardDescriptor descriptor = PlatformUI.getWorkbench().getNewWizardRegistry()
                .findWizard("org.eclipse.jdt.ui.wizards.NewSourceFolderCreationWizard");
        if (descriptor!=null) {
            try {
                NewSourceFolderCreationWizard wizard = (NewSourceFolderCreationWizard) descriptor.createWizard();
                wizard.init(workbench, selection);
                WizardDialog wd = new WizardDialog(Display.getCurrent().getActiveShell(),
                        wizard);
                wd.setTitle(wizard.getWindowTitle());
                wd.open();
                return wizard.getCreatedElement();
            }
            catch (CoreException e) {
                e.printStackTrace();
            }
        }
        return null;
    }
    /** Adds the "create shared package" checkbox; toggles {@code shared}. */
    void createSharedField(Composite composite) {
        new Label(composite, SWT.NONE);
        Button sharedPackage = new Button(composite, SWT.CHECK);
        sharedPackage.setText(getSharedPackageLabel());
        sharedPackage.setSelection(shared);
        GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        igd.horizontalSpan = 3;
        igd.grabExcessHorizontalSpace = true;
        sharedPackage.setLayoutData(igd);
        sharedPackage.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                shared = !shared;
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
    }

    /**
     * Adds the "create toplevel declaration" checkbox; it is disabled (and
     * pre-selected) when the page was built with declarationButtonDisabled.
     */
    void createDeclarationField(Composite composite) {
        new Label(composite, SWT.NONE);
        Button dec = new Button(composite, SWT.CHECK);
        dec.setText("create toplevel class or method declaration");
        dec.setSelection(declaration);
        dec.setEnabled(!declarationButtonDisabled);
        GridData igd= new GridData(GridData.HORIZONTAL_ALIGN_FILL);
        igd.horizontalSpan = 3;
        igd.grabExcessHorizontalSpace = true;
        dec.setLayoutData(igd);
        dec.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                declaration = !declaration;
            }
            @Override
            public void widgetDefaultSelected(SelectionEvent e) {}
        });
    }
    /** Label for the shared-package checkbox; subclasses may override. */
    String getSharedPackageLabel() {
        return "Create shared package (visible to other modules)";
    }

    /** Label for the package field; subclasses may override. */
    String getPackageLabel() {
        return "Package: ";
    }
    /**
     * Seeds the source folder and package from the current selection when the
     * selected Java element is a source folder (default package assumed) or a
     * package. Any other selection leaves the fields untouched.
     */
    public void initFromSelection() {
        IJavaElement je = getSelectedElement();
        if (je instanceof IPackageFragmentRoot) {
            sourceDir = (IPackageFragmentRoot) je;
            packageFragment = sourceDir.getPackageFragment("");
            packageName = packageFragment.getElementName();
        }
        else if (je instanceof IPackageFragment) {
            packageFragment = (IPackageFragment) je;
            packageName = packageFragment.getElementName();
            sourceDir = (IPackageFragmentRoot) packageFragment.getAncestor(PACKAGE_FRAGMENT_ROOT);
        }
    }

    /** Stores the workbench and selection for later wizard delegation. */
    public void init(IWorkbench workbench, IStructuredSelection selection) {
        this.selection = selection;
        this.workbench = workbench;
    }
boolean isComplete() {
return packageNameIsLegal() && unitIsNameLegal() &&
sourceDir!=null &&
sourceDir.getPackageFragment(packageFragment.getElementName())
.equals(packageFragment);
}
    public IPackageFragment getPackageFragment() {
        return packageFragment;
    }

    public IPackageFragmentRoot getSourceDir() {
        return sourceDir;
    }

    /** The chosen compilation unit name, without the file extension. */
    public String getUnitName() {
        return unitName;
    }

    /** Whether the 'header.ceylon' preamble is copied into the new unit. */
    public boolean isIncludePreamble() {
        return includePreamble;
    }

    public boolean isShared() {
        return shared;
    }

    /** Whether a toplevel declaration stub is generated. */
    public boolean isDeclaration() {
        return declaration;
    }
private String readHeader() {
//TODO: use IRunnableWithProgress
StringBuilder sb = new StringBuilder();
IFile file = getHeaderFile();
if (file.exists() && file.isAccessible()) {
InputStream stream = null;
try {
stream = file.getContents();
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
String line;
while ((line = reader.readLine())!=null) {
sb.append(line).append("\n");
}
}
catch (Exception ex) {
ex.printStackTrace();
}
finally {
try {
if (stream!=null) stream.close();
}
catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
return sb.toString();
}
private void saveHeader(String header) {
//TODO: use IRunnableWithProgress
IFile file = getHeaderFile();
ByteArrayInputStream stream = null;
try {
if (file.exists()) {
file.delete(true, null);
}
stream = new ByteArrayInputStream(header.getBytes()); //TODO: encoding
file.create(stream, true, null);
}
catch (CoreException e) {
e.printStackTrace();
}
finally {
try {
if (stream!=null) stream.close();
}
catch (IOException e) {
e.printStackTrace();
}
}
}
    /**
     * The preamble file: 'header.ceylon' in the root of the project that owns
     * the current source folder. Assumes sourceDir is non-null.
     */
    private IFile getHeaderFile() {
        return sourceDir.getJavaProject().getProject()
                .getFile("header.ceylon");
    }
    private boolean unitIsNameLegal() {
        return unitName!=null &&
                unitIsNameLegal(unitName);
    }

    // A legal unit name is one or more word characters (letters, digits,
    // underscore); subclasses may loosen or tighten this.
    boolean unitIsNameLegal(String unitName) {
        return unitName.matches("\\w+");
    }

    private String getIllegalUnitNameMessage() {
        return "Please enter a legal compilation unit name.";
    }

    private String getSelectSourceFolderMessage() {
        return "Please select a source folder";
    }

    // Empty means the default package; otherwise dot-separated segments that
    // each start with a lowercase letter or underscore.
    boolean packageNameIsLegal(String packageName) {
        return packageName.isEmpty() ||
                packageName.matches("^[a-z_]\\w*(\\.[a-z_]\\w*)*$");
    }

    private boolean packageNameIsLegal() {
        return packageName!=null &&
                packageNameIsLegal(packageName);
    }

    String getIllegalPackageNameMessage() {
        return "Please enter a legal package name.";
    }
}
| fix bug where Finish button was not enabled after source folder
selection | plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/imp/wizard/NewUnitWizardPage.java | fix bug where Finish button was not enabled after source folder selection |
|
Java | mpl-2.0 | 43c8f82c728da39ec884955f03126365e46e7b50 | 0 | Skelril/Skree | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package com.skelril.skree.content.zone.group.jungleraid;
import com.flowpowered.math.vector.Vector3i;
import com.google.common.collect.Lists;
import com.skelril.nitro.Clause;
import com.skelril.nitro.entity.SafeTeleportHelper;
import com.skelril.nitro.probability.Probability;
import com.skelril.skree.content.zone.LegacyZoneBase;
import com.skelril.skree.service.internal.zone.Zone;
import com.skelril.skree.service.internal.zone.ZoneRegion;
import com.skelril.skree.service.internal.zone.ZoneStatus;
import org.apache.commons.lang3.text.WordUtils;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockTypes;
import org.spongepowered.api.block.trait.EnumTraits;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.meta.ItemEnchantment;
import org.spongepowered.api.data.type.DyeColor;
import org.spongepowered.api.data.type.DyeColors;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.item.Enchantments;
import org.spongepowered.api.item.ItemTypes;
import org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.channel.MessageChannel;
import org.spongepowered.api.text.format.TextColors;
import org.spongepowered.api.util.Color;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import static com.skelril.nitro.item.ItemStackFactory.newItemStack;
import static com.skelril.nitro.transformer.ForgeTransformer.tf;
import static com.skelril.skree.service.internal.zone.PlayerClassifier.PARTICIPANT;
import static com.skelril.skree.service.internal.zone.PlayerClassifier.SPECTATOR;
public class JungleRaidInstance extends LegacyZoneBase implements Zone, Runnable {

    // Maps each participant to the team set (below) that contains them.
    private Map<Player, Set<Player>> teamMapping = new HashMap<>();
    private Set<Player> freeForAllPlayers = new HashSet<>();
    private Set<Player> blueTeamPlayers = new HashSet<>();
    private Set<Player> redTeamPlayers = new HashSet<>();

    // Class chosen in the lobby; players without an entry default to BALANCED.
    private Map<Player, JungleRaidClass> classMap = new HashMap<>();
    // Last player to damage a given player, for kill attribution.
    private Map<Player, Player> lastAttackerMap = new HashMap<>();

    private JungleRaidState state = JungleRaidState.LOBBY;
    // Millisecond timestamp of when the INITIALIZE phase began.
    private long startTime;

    private Location<World> lobbySpawnLocation;

    // Flag-selection sign bank in the lobby (scroll arrows + scrolling signs).
    private Location<World> leftFlagActivationSign;
    private Location<World> rightFlagActivationSign;
    private List<Location<World>> scrollingFlagSigns = new ArrayList<>();

    // Class-selection sign bank in the lobby.
    private Location<World> leftClassActivationSign;
    private Location<World> rightClassActivationSign;
    private List<Location<World>> scrollingClassSigns = new ArrayList<>();

    // Scroll offsets of the sign banks into the flag/class enumerations.
    private int signScrollFlagStart;
    private int signScrollClassStart;

    private FlagEffectData flagData = new FlagEffectData();
    // Indexed by JungleRaidFlag.index; true = flag active for the next game.
    private boolean[] flagState = new boolean[JungleRaidFlag.values().length];
    public JungleRaidInstance(ZoneRegion region) {
        super(region);
    }

    /**
     * Lays out the lobby signs and resets the zone to an empty clean state.
     */
    @Override
    public boolean init() {
        setUp();
        remove();
        return true;
    }
Vector3i offset = getRegion().getMinimumPoint();
lobbySpawnLocation = new Location<>(getRegion().getExtent(), offset.add(216, 2, 29));
leftFlagActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 29));
rightFlagActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 23));
for (int z = 28; z > 23; --z) { // Do this in rerverse so left/right buttons are correct
scrollingFlagSigns.add(new Location<>(getRegion().getExtent(), offset.add(209, 3, z)));
}
for (JungleRaidFlag flag : JungleRaidFlag.values()) {
flagState[flag.index] = flag.enabledByDefault;
}
flagSignPopulate();
leftClassActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 22));
rightClassActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 18));
for (int z = 21; z > 18; --z) { // Do this in rerverse so left/right buttons are correct
scrollingClassSigns.add(new Location<>(getRegion().getExtent(), offset.add(209, 3, z)));
}
classSignPopulate();
}
private void updateFlagSign(int index) {
String title = JungleRaidFlag.values()[signScrollFlagStart + index].toString();
if (title.length() > 15) {
title = title.substring(0, 15);
}
title = WordUtils.capitalizeFully(title.replace("_", " "));
scrollingFlagSigns.get(index).getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(title),
Text.of(flagState[signScrollFlagStart + index] ? Text.of(TextColors.DARK_GREEN, "Enabled") : Text.of(TextColors.RED, "Disabled")),
Text.EMPTY
));
}
private void flagSignPopulate() {
for (int i = 0; i < scrollingFlagSigns.size(); ++i) {
updateFlagSign(i);
}
boolean isLeftScrollable = signScrollFlagStart == 0;
leftFlagActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isLeftScrollable ? "" : TextColors.BLUE, "<<"),
Text.EMPTY,
Text.EMPTY
));
boolean isRightScrollable = signScrollFlagStart + scrollingFlagSigns.size() == JungleRaidFlag.values().length;
rightFlagActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isRightScrollable ? "" : TextColors.BLUE, ">>"),
Text.EMPTY,
Text.EMPTY
));
}
public Location<World> getLeftFlagActivationSign() {
return leftFlagActivationSign;
}
public Location<World> getRightFlagActivationSign() {
return rightFlagActivationSign;
}
public void leftFlagListSign() {
signScrollFlagStart = Math.max(0, signScrollFlagStart - scrollingFlagSigns.size());
flagSignPopulate();
}
public void rightFlagListSign() {
signScrollFlagStart = Math.min(JungleRaidFlag.values().length - scrollingFlagSigns.size(), signScrollFlagStart + scrollingFlagSigns.size());
flagSignPopulate();
}
public void tryToggleFlagSignAt(Location<World> loc) {
for (int i = 0; i < scrollingFlagSigns.size(); ++i) {
if (loc.equals(scrollingFlagSigns.get(i))) {
flagState[signScrollFlagStart + i] = !flagState[signScrollFlagStart + i];
updateFlagSign(i);
break;
}
}
}
private void updateClassSign(int index) {
String title = JungleRaidClass.values()[signScrollClassStart + index].toString();
if (title.length() > 15) {
title = title.substring(0, 15);
}
title = WordUtils.capitalizeFully(title.replace("_", " "));
scrollingClassSigns.get(index).getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(title),
Text.EMPTY,
Text.EMPTY
));
}
private void classSignPopulate() {
for (int i = 0; i < scrollingClassSigns.size(); ++i) {
updateClassSign(i);
}
boolean isLeftScrollable = signScrollClassStart == 0;
leftClassActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isLeftScrollable ? "" : TextColors.BLUE, "<<"),
Text.EMPTY,
Text.EMPTY
));
boolean isRightScrollable = signScrollClassStart + scrollingClassSigns.size() == JungleRaidClass.values().length;
rightClassActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isRightScrollable ? "" : TextColors.BLUE, ">>"),
Text.EMPTY,
Text.EMPTY
));
}
public Location<World> getLeftClassActivationSign() {
return leftClassActivationSign;
}
public Location<World> getRightClassActivationSign() {
return rightClassActivationSign;
}
public void leftClassListSign() {
signScrollClassStart = Math.max(0, signScrollClassStart - scrollingClassSigns.size());
classSignPopulate();
}
public void rightClassListSign() {
signScrollClassStart = Math.min(JungleRaidClass.values().length - scrollingClassSigns.size(), signScrollClassStart + scrollingClassSigns.size());
classSignPopulate();
}
public void tryUseClassSignAt(Location<World> loc, Player player) {
for (int i = 0; i < scrollingClassSigns.size(); ++i) {
if (loc.equals(scrollingClassSigns.get(i))) {
JungleRaidClass targetClass = JungleRaidClass.values()[signScrollClassStart + i];
giveBaseEquipment(player, targetClass);
classMap.put(player, targetClass);
break;
}
}
}
public void setFlag(JungleRaidFlag flag, boolean enabled) {
flagState[flag.index] = enabled;
}
public boolean isFlagEnabled(JungleRaidFlag flag) {
return flagState[flag.index];
}
@Override
public void forceEnd() {
remove(getPlayers(PARTICIPANT));
remove();
}
@Override
public void run() {
if (isEmpty()) {
expire();
return;
}
if (state == JungleRaidState.LOBBY) {
smartStart();
return;
}
if (state == JungleRaidState.INITIALIZE) {
tryBeginCombat();
return;
}
Optional<Clause<String, WinType>> optWinner = getWinner();
if (optWinner.isPresent()) {
processWin(optWinner.get());
expire();
return;
}
JungleRaidEffectProcessor.run(this);
}
public JungleRaidState getState() {
return state;
}
public long getStartTime() {
return startTime;
}
public FlagEffectData getFlagData() {
return flagData;
}
private void tryBeginCombat() {
if (System.currentTimeMillis() - startTime >= TimeUnit.MINUTES.toMillis(1)) {
state = JungleRaidState.IN_PROGRESS;
getPlayerMessageChannel(SPECTATOR).send(Text.of(TextColors.DARK_RED, "LET THE SLAUGHTER BEGIN!"));
}
}
public Optional<Clause<String, WinType>> getWinner() {
if (freeForAllPlayers.size() == 1 && blueTeamPlayers.isEmpty() && redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>(freeForAllPlayers.iterator().next().getName(), WinType.SOLO));
} else if (freeForAllPlayers.isEmpty() && !blueTeamPlayers.isEmpty() && redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>("Blue", WinType.TEAM));
} else if (freeForAllPlayers.isEmpty() && blueTeamPlayers.isEmpty() && !redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>("Red", WinType.TEAM));
} else if (freeForAllPlayers.isEmpty() && blueTeamPlayers.isEmpty() && redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>(null, WinType.DRAW));
}
return Optional.empty();
}
private void processWin(Clause<String, WinType> winClause) {
state = JungleRaidState.DONE;
switch (winClause.getValue()) {
case SOLO:
MessageChannel.TO_ALL.send(Text.of(TextColors.GOLD, winClause.getKey(), " has won the jungle raid!"));
break;
case TEAM:
MessageChannel.TO_ALL.send(Text.of(TextColors.GOLD, winClause.getKey(), " team has won the jungle raid!"));
break;
case DRAW:
MessageChannel.TO_ALL.send(Text.of(TextColors.GOLD, "The jungle raid was a draw!"));
break;
}
}
@Override
public Clause<Player, ZoneStatus> add(Player player) {
if (state == JungleRaidState.LOBBY) {
player.setLocation(lobbySpawnLocation);
return new Clause<>(player, ZoneStatus.ADDED);
}
return new Clause<>(player, ZoneStatus.NO_REJOIN);
}
private void giveBaseEquipment(Player player, JungleRaidClass jrClass) {
player.getInventory().clear();
List<ItemStack> gear = new ArrayList<>();
switch (jrClass) {
case MELEE:
ItemStack enchantedSword = newItemStack(ItemTypes.IRON_SWORD);
enchantedSword.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.FIRE_ASPECT, 2),
new ItemEnchantment(Enchantments.KNOCKBACK, 2)
));
gear.add(enchantedSword);
break;
case LUMBERJACK:
ItemStack enchantedAxe = newItemStack(ItemTypes.DIAMOND_AXE);
enchantedAxe.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.SHARPNESS, 3),
new ItemEnchantment(Enchantments.KNOCKBACK, 2)
));
gear.add(enchantedAxe);
break;
case ARCHER:
ItemStack dmgBow = newItemStack(ItemTypes.BOW);
dmgBow.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.PUNCH, 2)
));
gear.add(dmgBow);
ItemStack fireBow = newItemStack(ItemTypes.BOW);
fireBow.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.FLAME, 1)
));
gear.add(fireBow);
break;
case SNIPER:
ItemStack superBow = newItemStack(ItemTypes.BOW);
superBow.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.POWER, 5),
new ItemEnchantment(Enchantments.PUNCH, 2),
new ItemEnchantment(Enchantments.FLAME, 1)
));
superBow.offer(Keys.ITEM_DURABILITY, jrClass.getArrowAmount());
gear.add(superBow);
ItemStack woodSword = newItemStack(ItemTypes.WOODEN_SWORD);
gear.add(woodSword);
break;
case ENGINEER:
ItemStack ironSword = newItemStack(ItemTypes.IRON_SWORD);
gear.add(ironSword);
ItemStack diamondPickaxe = newItemStack(ItemTypes.DIAMOND_PICKAXE);
gear.add(diamondPickaxe);
break;
case BALANCED:
ItemStack standardSword = newItemStack(ItemTypes.IRON_SWORD);
gear.add(standardSword);
ItemStack standardBow = newItemStack(ItemTypes.BOW);
gear.add(standardBow);
break;
}
int tntAmt = jrClass.getTNTAmount();
int tntStacks = tntAmt / 64;
int tntRemainder = tntAmt % 64;
for (int i = 0; i < tntStacks; ++i) {
gear.add(newItemStack(BlockTypes.TNT, 64));
}
if (tntRemainder > 0) {
gear.add(newItemStack(BlockTypes.TNT, tntRemainder));
}
if (jrClass.hasFlintAndSteel()) {
gear.add(newItemStack(ItemTypes.FLINT_AND_STEEL));
}
if (jrClass.hasShears()) {
gear.add(newItemStack(ItemTypes.SHEARS));
}
if (jrClass.hasAxe()) {
gear.add(newItemStack(ItemTypes.IRON_AXE));
}
gear.add(newItemStack(ItemTypes.COOKED_BEEF, 64));
gear.add(newItemStack(ItemTypes.COMPASS));
int arrowAmt = jrClass.getArrowAmount();
int arrowStacks = arrowAmt / 64;
int arrowRemainder = arrowAmt % 64;
for (int i = 0; i < arrowStacks; ++i) {
gear.add(newItemStack(ItemTypes.ARROW, 64));
}
if (arrowRemainder > 0) {
gear.add(newItemStack(ItemTypes.ARROW, arrowRemainder));
}
for (ItemStack stack : gear) {
player.getInventory().offer(stack);
}
}
private void giveTeamEquipment(Player player, Color teamColor) {
// EquipmentInventory playerEquipment = player.getInventory().query(EquipmentInventory.class);
ItemStack teamHood = newItemStack(ItemTypes.LEATHER_HELMET);
teamHood.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Hood"));
teamHood.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.HEADWEAR, teamHood);
tf(player).inventory.armorInventory[3] = tf(teamHood);
ItemStack teamChestplate = newItemStack(ItemTypes.LEATHER_CHESTPLATE);
teamChestplate.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Chestplate"));
teamChestplate.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.CHESTPLATE, teamChestplate);
tf(player).inventory.armorInventory[2] = tf(teamChestplate);
ItemStack teamLeggings = newItemStack(ItemTypes.LEATHER_LEGGINGS);
teamLeggings.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Leggings"));
teamLeggings.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.LEGGINGS, teamLeggings);
tf(player).inventory.armorInventory[1] = tf(teamLeggings);
ItemStack teamBoots = newItemStack(ItemTypes.LEATHER_BOOTS);
teamBoots.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Boots"));
teamBoots.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.BOOTS, teamBoots);
tf(player).inventory.armorInventory[0] = tf(teamBoots);
}
private void addPlayer(Player player, Supplier<Location<World>> startingPos, Color teamColor, JungleRaidClass jrClass) {
giveBaseEquipment(player, jrClass);
giveTeamEquipment(player, teamColor);
player.setLocation(startingPos.get());
}
public void addFFAPlayer(Player player, JungleRaidClass jrClass) {
addPlayer(player, this::getRandomLocation, Color.WHITE, jrClass);
freeForAllPlayers.add(player);
teamMapping.put(player, freeForAllPlayers);
}
public void addBluePlayer(Player player, JungleRaidClass jrClass) {
Location<World> spawnPoint = getRandomLocation();
addPlayer(player, () -> spawnPoint, Color.BLUE, jrClass);
blueTeamPlayers.add(player);
teamMapping.put(player, blueTeamPlayers);
}
public void addRedPlayer(Player player, JungleRaidClass jrClass) {
Location<World> spawnPoint = getRandomLocation();
addPlayer(player, () -> spawnPoint, Color.RED, jrClass);
redTeamPlayers.add(player);
teamMapping.put(player, redTeamPlayers);
}
public void smartStart() {
List<Player> ffaList = new ArrayList<>();
List<Player> redList = new ArrayList<>();
List<Player> blueList = new ArrayList<>();
Collection<Player> containedPlayers = getPlayers(PARTICIPANT);
if (containedPlayers.size() <= 1) {
return;
}
for (Player player : containedPlayers) {
BlockState state = player.getLocation().add(0, -1, 0).getBlock();
if (state.getType() != BlockTypes.WOOL) {
return;
}
Optional<?> optColor = state.getTraitValue(EnumTraits.WOOL_COLOR);
if (optColor.isPresent()) {
DyeColor color = (DyeColor) optColor.get();
if (color == DyeColors.RED) {
redList.add(player);
} else if (color == DyeColors.BLUE) {
blueList.add(player);
} else if (color == DyeColors.WHITE) {
ffaList.add(player);
} else {
return;
}
}
}
ffaList.stream().forEach(p -> addFFAPlayer(p, classMap.getOrDefault(p, JungleRaidClass.BALANCED)));
redList.stream().forEach(p -> addRedPlayer(p, classMap.getOrDefault(p, JungleRaidClass.BALANCED)));
blueList.stream().forEach(p -> addBluePlayer(p, classMap.getOrDefault(p, JungleRaidClass.BALANCED)));
state = JungleRaidState.INITIALIZE;
startTime = System.currentTimeMillis();
}
public Location<World> getRandomLocation() {
Vector3i offset = getRegion().getMinimumPoint();
Vector3i boundingBox = getRegion().getBoundingBox();
Vector3i randomDest;
while (true) {
randomDest = new Vector3i(
Probability.getRandom(boundingBox.getX()),
Probability.getRangedRandom(16, 80),
Probability.getRandom(boundingBox.getZ())
).add(offset);
Optional<Location<World>> optSafeDest = SafeTeleportHelper.getSafeDest(
new Location<>(getRegion().getExtent(), randomDest)
);
if (optSafeDest.isPresent()) {
Location<World> safeDest = optSafeDest.get();
if (safeDest.getY() > 16 && safeDest.getY() < 80) {
return safeDest;
}
}
}
}
@Override
public Clause<Player, ZoneStatus> remove(Player player) {
playerLost(player);
return super.remove(player);
}
public void playerLost(Player player) {
Set<Player> teamPlayers = teamMapping.remove(player);
if (teamPlayers != null) {
teamPlayers.remove(player);
player.getInventory().clear();
payPlayer(player);
}
}
public Color getTeamColor(Player player) {
Set<Player> playerTeam = teamMapping.get(player);
if (playerTeam == redTeamPlayers) {
return Color.RED;
} else if (playerTeam == blueTeamPlayers) {
return Color.BLUE;
}
return Color.WHITE;
}
private void payPlayer(Player player) {
}
public void recordAttack(Player attacker, Player defender) {
lastAttackerMap.put(defender, attacker);
}
public Optional<Player> getLastAttacker(Player defender) {
return Optional.ofNullable(lastAttackerMap.get(defender));
}
public boolean isFriendlyFire(Player attacker, Player defender) {
Set<Player> attackerTeam = teamMapping.get(attacker);
Set<Player> defenderTeam = teamMapping.get(defender);
/* We want identity comparison to prevent expensive list comparisons */
return attackerTeam == defenderTeam && attackerTeam != freeForAllPlayers && attackerTeam != null;
}
}
| src/main/java/com/skelril/skree/content/zone/group/jungleraid/JungleRaidInstance.java | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package com.skelril.skree.content.zone.group.jungleraid;
import com.flowpowered.math.vector.Vector3i;
import com.google.common.collect.Lists;
import com.skelril.nitro.Clause;
import com.skelril.nitro.entity.SafeTeleportHelper;
import com.skelril.nitro.probability.Probability;
import com.skelril.skree.content.zone.LegacyZoneBase;
import com.skelril.skree.service.internal.zone.Zone;
import com.skelril.skree.service.internal.zone.ZoneRegion;
import com.skelril.skree.service.internal.zone.ZoneStatus;
import org.apache.commons.lang3.text.WordUtils;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockTypes;
import org.spongepowered.api.block.trait.EnumTraits;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.meta.ItemEnchantment;
import org.spongepowered.api.data.type.DyeColor;
import org.spongepowered.api.data.type.DyeColors;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.item.Enchantments;
import org.spongepowered.api.item.ItemTypes;
import org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.channel.MessageChannel;
import org.spongepowered.api.text.format.TextColors;
import org.spongepowered.api.util.Color;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import static com.skelril.nitro.item.ItemStackFactory.newItemStack;
import static com.skelril.nitro.transformer.ForgeTransformer.tf;
import static com.skelril.skree.service.internal.zone.PlayerClassifier.PARTICIPANT;
import static com.skelril.skree.service.internal.zone.PlayerClassifier.SPECTATOR;
public class JungleRaidInstance extends LegacyZoneBase implements Zone, Runnable {
private Map<Player, Set<Player>> teamMapping = new HashMap<>();
private Set<Player> freeForAllPlayers = new HashSet<>();
private Set<Player> blueTeamPlayers = new HashSet<>();
private Set<Player> redTeamPlayers = new HashSet<>();
private Map<Player, JungleRaidClass> classMap = new HashMap<>();
private Map<Player, Player> lastAttackerMap = new HashMap<>();
private JungleRaidState state = JungleRaidState.LOBBY;
private long startTime;
private Location<World> lobbySpawnLocation;
private Location<World> leftFlagActivationSign;
private Location<World> rightFlagActivationSign;
private List<Location<World>> scrollingFlagSigns = new ArrayList<>();
private Location<World> leftClassActivationSign;
private Location<World> rightClassActivationSign;
private List<Location<World>> scrollingClassSigns = new ArrayList<>();
private int signScrollFlagStart;
private int signScrollClassStart;
private FlagEffectData flagData = new FlagEffectData();
private boolean[] flagState = new boolean[JungleRaidFlag.values().length];
public JungleRaidInstance(ZoneRegion region) {
super(region);
}
@Override
public boolean init() {
setUp();
remove();
return true;
}
private void setUp() {
Vector3i offset = getRegion().getMinimumPoint();
lobbySpawnLocation = new Location<>(getRegion().getExtent(), offset.add(216, 2, 29));
leftFlagActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 29));
rightFlagActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 23));
for (int z = 28; z > 23; --z) { // Do this in rerverse so left/right buttons are correct
scrollingFlagSigns.add(new Location<>(getRegion().getExtent(), offset.add(209, 3, z)));
}
for (JungleRaidFlag flag : JungleRaidFlag.values()) {
flagState[flag.index] = flag.enabledByDefault;
}
flagSignPopulate();
leftClassActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 22));
rightClassActivationSign = new Location<>(getRegion().getExtent(), offset.add(209, 3, 18));
for (int z = 21; z > 18; --z) { // Do this in rerverse so left/right buttons are correct
scrollingClassSigns.add(new Location<>(getRegion().getExtent(), offset.add(209, 3, z)));
}
classSignPopulate();
}
private void updateFlagSign(int index) {
String title = JungleRaidFlag.values()[signScrollFlagStart + index].toString();
if (title.length() > 15) {
title = title.substring(0, 15);
}
title = WordUtils.capitalizeFully(title.replace("_", " "));
scrollingFlagSigns.get(index).getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(title),
Text.of(flagState[signScrollFlagStart + index] ? Text.of(TextColors.DARK_GREEN, "Enabled") : Text.of(TextColors.RED, "Disabled")),
Text.EMPTY
));
}
private void flagSignPopulate() {
for (int i = 0; i < scrollingFlagSigns.size(); ++i) {
updateFlagSign(i);
}
boolean isLeftScrollable = signScrollFlagStart == 0;
leftFlagActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isLeftScrollable ? "" : TextColors.BLUE, "<<"),
Text.EMPTY,
Text.EMPTY
));
boolean isRightScrollable = signScrollFlagStart + scrollingFlagSigns.size() == JungleRaidFlag.values().length;
rightFlagActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isRightScrollable ? "" : TextColors.BLUE, ">>"),
Text.EMPTY,
Text.EMPTY
));
}
public Location<World> getLeftFlagActivationSign() {
return leftFlagActivationSign;
}
public Location<World> getRightFlagActivationSign() {
return rightFlagActivationSign;
}
public void leftFlagListSign() {
signScrollFlagStart = Math.max(0, signScrollFlagStart - scrollingFlagSigns.size());
flagSignPopulate();
}
public void rightFlagListSign() {
signScrollFlagStart = Math.min(JungleRaidFlag.values().length - scrollingFlagSigns.size(), signScrollFlagStart + scrollingFlagSigns.size());
flagSignPopulate();
}
public void tryToggleFlagSignAt(Location<World> loc) {
for (int i = 0; i < scrollingFlagSigns.size(); ++i) {
if (loc.equals(scrollingFlagSigns.get(i))) {
flagState[signScrollFlagStart + i] = !flagState[signScrollFlagStart + i];
updateFlagSign(i);
break;
}
}
}
private void updateClassSign(int index) {
String title = JungleRaidClass.values()[signScrollClassStart + index].toString();
if (title.length() > 15) {
title = title.substring(0, 15);
}
title = WordUtils.capitalizeFully(title.replace("_", " "));
scrollingClassSigns.get(index).getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(title),
Text.EMPTY,
Text.EMPTY
));
}
private void classSignPopulate() {
for (int i = 0; i < scrollingClassSigns.size(); ++i) {
updateClassSign(i);
}
boolean isLeftScrollable = signScrollClassStart == 0;
leftClassActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isLeftScrollable ? "" : TextColors.BLUE, "<<"),
Text.EMPTY,
Text.EMPTY
));
boolean isRightScrollable = signScrollClassStart + scrollingClassSigns.size() == JungleRaidClass.values().length;
rightClassActivationSign.getTileEntity().get().offer(Keys.SIGN_LINES, Lists.newArrayList(
Text.EMPTY,
Text.of(isRightScrollable ? "" : TextColors.BLUE, ">>"),
Text.EMPTY,
Text.EMPTY
));
}
public Location<World> getLeftClassActivationSign() {
return leftClassActivationSign;
}
public Location<World> getRightClassActivationSign() {
return rightClassActivationSign;
}
public void leftClassListSign() {
signScrollClassStart = Math.max(0, signScrollClassStart - scrollingClassSigns.size());
classSignPopulate();
}
public void rightClassListSign() {
signScrollClassStart = Math.min(JungleRaidClass.values().length - scrollingClassSigns.size(), signScrollClassStart + scrollingClassSigns.size());
classSignPopulate();
}
public void tryUseClassSignAt(Location<World> loc, Player player) {
for (int i = 0; i < scrollingClassSigns.size(); ++i) {
if (loc.equals(scrollingClassSigns.get(i))) {
JungleRaidClass targetClass = JungleRaidClass.values()[signScrollClassStart + i];
giveBaseEquipment(player, targetClass);
classMap.put(player, targetClass);
break;
}
}
}
public void setFlag(JungleRaidFlag flag, boolean enabled) {
flagState[flag.index] = enabled;
}
public boolean isFlagEnabled(JungleRaidFlag flag) {
return flagState[flag.index];
}
@Override
public void forceEnd() {
remove(getPlayers(PARTICIPANT));
remove();
}
@Override
public void run() {
if (isEmpty()) {
expire();
return;
}
if (state == JungleRaidState.LOBBY) {
smartStart();
return;
}
if (state == JungleRaidState.INITIALIZE) {
tryBeginCombat();
return;
}
Optional<Clause<String, WinType>> optWinner = getWinner();
if (optWinner.isPresent()) {
processWin(optWinner.get());
expire();
return;
}
JungleRaidEffectProcessor.run(this);
}
public JungleRaidState getState() {
return state;
}
public long getStartTime() {
return startTime;
}
public FlagEffectData getFlagData() {
return flagData;
}
private void tryBeginCombat() {
if (System.currentTimeMillis() - startTime >= TimeUnit.MINUTES.toMillis(1)) {
state = JungleRaidState.IN_PROGRESS;
getPlayerMessageChannel(SPECTATOR).send(Text.of(TextColors.DARK_RED, "LET THE SLAUGHTER BEGIN!"));
}
}
public Optional<Clause<String, WinType>> getWinner() {
if (freeForAllPlayers.size() == 1 && blueTeamPlayers.isEmpty() && redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>(freeForAllPlayers.iterator().next().getName(), WinType.SOLO));
} else if (freeForAllPlayers.isEmpty() && !blueTeamPlayers.isEmpty() && redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>("Blue", WinType.TEAM));
} else if (freeForAllPlayers.isEmpty() && blueTeamPlayers.isEmpty() && !redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>("Red", WinType.TEAM));
} else if (freeForAllPlayers.isEmpty() && blueTeamPlayers.isEmpty() && redTeamPlayers.isEmpty()) {
return Optional.of(new Clause<>(null, WinType.DRAW));
}
return Optional.empty();
}
private void processWin(Clause<String, WinType> winClause) {
state = JungleRaidState.DONE;
switch (winClause.getValue()) {
case SOLO:
MessageChannel.TO_ALL.send(Text.of(TextColors.GOLD, winClause.getKey(), " has won the jungle raid!"));
break;
case TEAM:
MessageChannel.TO_ALL.send(Text.of(TextColors.GOLD, winClause.getKey(), " team has won the jungle raid!"));
break;
case DRAW:
MessageChannel.TO_ALL.send(Text.of(TextColors.GOLD, "The jungle raid was a draw!"));
break;
}
}
@Override
public Clause<Player, ZoneStatus> add(Player player) {
if (state == JungleRaidState.LOBBY) {
player.setLocation(lobbySpawnLocation);
return new Clause<>(player, ZoneStatus.ADDED);
}
return new Clause<>(player, ZoneStatus.NO_REJOIN);
}
private void giveBaseEquipment(Player player, JungleRaidClass jrClass) {
player.getInventory().clear();
List<ItemStack> gear = new ArrayList<>();
switch (jrClass) {
case MELEE:
ItemStack enchantedSword = newItemStack(ItemTypes.IRON_SWORD);
enchantedSword.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.FIRE_ASPECT, 2),
new ItemEnchantment(Enchantments.KNOCKBACK, 2)
));
gear.add(enchantedSword);
break;
case LUMBERJACK:
ItemStack enchantedAxe = newItemStack(ItemTypes.DIAMOND_AXE);
enchantedAxe.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.SHARPNESS, 3),
new ItemEnchantment(Enchantments.KNOCKBACK, 2)
));
gear.add(enchantedAxe);
break;
case ARCHER:
ItemStack dmgBow = newItemStack(ItemTypes.BOW);
dmgBow.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.PUNCH, 2)
));
gear.add(dmgBow);
ItemStack fireBow = newItemStack(ItemTypes.BOW);
fireBow.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.FLAME, 1)
));
gear.add(fireBow);
break;
case SNIPER:
ItemStack superBow = newItemStack(ItemTypes.BOW);
superBow.offer(Keys.ITEM_ENCHANTMENTS, Lists.newArrayList(
new ItemEnchantment(Enchantments.POWER, 5),
new ItemEnchantment(Enchantments.PUNCH, 2),
new ItemEnchantment(Enchantments.FLAME, 1)
));
// If this happens things isn't available blow up
// @SuppressWarnings({"ConstantConditions", "OptionalGetWithoutIsPresent"})
// int useLimit = ItemTypes.BOW.getDefaultProperty(UseLimitProperty.class).get().getValue();
int useLimit = 384;
superBow.offer(Keys.ITEM_DURABILITY, useLimit - jrClass.getArrowAmount());
gear.add(superBow);
ItemStack woodSword = newItemStack(ItemTypes.WOODEN_SWORD);
gear.add(woodSword);
break;
case ENGINEER:
ItemStack ironSword = newItemStack(ItemTypes.IRON_SWORD);
gear.add(ironSword);
ItemStack diamondPickaxe = newItemStack(ItemTypes.DIAMOND_PICKAXE);
gear.add(diamondPickaxe);
break;
case BALANCED:
ItemStack standardSword = newItemStack(ItemTypes.IRON_SWORD);
gear.add(standardSword);
ItemStack standardBow = newItemStack(ItemTypes.BOW);
gear.add(standardBow);
break;
}
int tntAmt = jrClass.getTNTAmount();
int tntStacks = tntAmt / 64;
int tntRemainder = tntAmt % 64;
for (int i = 0; i < tntStacks; ++i) {
gear.add(newItemStack(BlockTypes.TNT, 64));
}
if (tntRemainder > 0) {
gear.add(newItemStack(BlockTypes.TNT, tntRemainder));
}
if (jrClass.hasFlintAndSteel()) {
gear.add(newItemStack(ItemTypes.FLINT_AND_STEEL));
}
if (jrClass.hasShears()) {
gear.add(newItemStack(ItemTypes.SHEARS));
}
if (jrClass.hasAxe()) {
gear.add(newItemStack(ItemTypes.IRON_AXE));
}
gear.add(newItemStack(ItemTypes.COOKED_BEEF, 64));
gear.add(newItemStack(ItemTypes.COMPASS));
int arrowAmt = jrClass.getArrowAmount();
int arrowStacks = arrowAmt / 64;
int arrowRemainder = arrowAmt % 64;
for (int i = 0; i < arrowStacks; ++i) {
gear.add(newItemStack(ItemTypes.ARROW, 64));
}
if (arrowRemainder > 0) {
gear.add(newItemStack(ItemTypes.ARROW, arrowRemainder));
}
for (ItemStack stack : gear) {
player.getInventory().offer(stack);
}
}
private void giveTeamEquipment(Player player, Color teamColor) {
// EquipmentInventory playerEquipment = player.getInventory().query(EquipmentInventory.class);
ItemStack teamHood = newItemStack(ItemTypes.LEATHER_HELMET);
teamHood.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Hood"));
teamHood.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.HEADWEAR, teamHood);
tf(player).inventory.armorInventory[3] = tf(teamHood);
ItemStack teamChestplate = newItemStack(ItemTypes.LEATHER_CHESTPLATE);
teamChestplate.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Chestplate"));
teamChestplate.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.CHESTPLATE, teamChestplate);
tf(player).inventory.armorInventory[2] = tf(teamChestplate);
ItemStack teamLeggings = newItemStack(ItemTypes.LEATHER_LEGGINGS);
teamLeggings.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Leggings"));
teamLeggings.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.LEGGINGS, teamLeggings);
tf(player).inventory.armorInventory[1] = tf(teamLeggings);
ItemStack teamBoots = newItemStack(ItemTypes.LEATHER_BOOTS);
teamBoots.offer(Keys.DISPLAY_NAME, Text.of(TextColors.WHITE, "Team Boots"));
teamBoots.offer(Keys.COLOR, teamColor);
// playerEquipment.set(EquipmentTypes.BOOTS, teamBoots);
tf(player).inventory.armorInventory[0] = tf(teamBoots);
}
private void addPlayer(Player player, Supplier<Location<World>> startingPos, Color teamColor, JungleRaidClass jrClass) {
giveBaseEquipment(player, jrClass);
giveTeamEquipment(player, teamColor);
player.setLocation(startingPos.get());
}
public void addFFAPlayer(Player player, JungleRaidClass jrClass) {
addPlayer(player, this::getRandomLocation, Color.WHITE, jrClass);
freeForAllPlayers.add(player);
teamMapping.put(player, freeForAllPlayers);
}
public void addBluePlayer(Player player, JungleRaidClass jrClass) {
Location<World> spawnPoint = getRandomLocation();
addPlayer(player, () -> spawnPoint, Color.BLUE, jrClass);
blueTeamPlayers.add(player);
teamMapping.put(player, blueTeamPlayers);
}
public void addRedPlayer(Player player, JungleRaidClass jrClass) {
Location<World> spawnPoint = getRandomLocation();
addPlayer(player, () -> spawnPoint, Color.RED, jrClass);
redTeamPlayers.add(player);
teamMapping.put(player, redTeamPlayers);
}
public void smartStart() {
List<Player> ffaList = new ArrayList<>();
List<Player> redList = new ArrayList<>();
List<Player> blueList = new ArrayList<>();
Collection<Player> containedPlayers = getPlayers(PARTICIPANT);
if (containedPlayers.size() <= 1) {
return;
}
for (Player player : containedPlayers) {
BlockState state = player.getLocation().add(0, -1, 0).getBlock();
if (state.getType() != BlockTypes.WOOL) {
return;
}
Optional<?> optColor = state.getTraitValue(EnumTraits.WOOL_COLOR);
if (optColor.isPresent()) {
DyeColor color = (DyeColor) optColor.get();
if (color == DyeColors.RED) {
redList.add(player);
} else if (color == DyeColors.BLUE) {
blueList.add(player);
} else if (color == DyeColors.WHITE) {
ffaList.add(player);
} else {
return;
}
}
}
ffaList.stream().forEach(p -> addFFAPlayer(p, classMap.getOrDefault(p, JungleRaidClass.BALANCED)));
redList.stream().forEach(p -> addRedPlayer(p, classMap.getOrDefault(p, JungleRaidClass.BALANCED)));
blueList.stream().forEach(p -> addBluePlayer(p, classMap.getOrDefault(p, JungleRaidClass.BALANCED)));
state = JungleRaidState.INITIALIZE;
startTime = System.currentTimeMillis();
}
public Location<World> getRandomLocation() {
Vector3i offset = getRegion().getMinimumPoint();
Vector3i boundingBox = getRegion().getBoundingBox();
Vector3i randomDest;
while (true) {
randomDest = new Vector3i(
Probability.getRandom(boundingBox.getX()),
Probability.getRangedRandom(16, 80),
Probability.getRandom(boundingBox.getZ())
).add(offset);
Optional<Location<World>> optSafeDest = SafeTeleportHelper.getSafeDest(
new Location<>(getRegion().getExtent(), randomDest)
);
if (optSafeDest.isPresent()) {
Location<World> safeDest = optSafeDest.get();
if (safeDest.getY() > 16 && safeDest.getY() < 80) {
return safeDest;
}
}
}
}
@Override
public Clause<Player, ZoneStatus> remove(Player player) {
playerLost(player);
return super.remove(player);
}
public void playerLost(Player player) {
Set<Player> teamPlayers = teamMapping.remove(player);
if (teamPlayers != null) {
teamPlayers.remove(player);
player.getInventory().clear();
payPlayer(player);
}
}
public Color getTeamColor(Player player) {
Set<Player> playerTeam = teamMapping.get(player);
if (playerTeam == redTeamPlayers) {
return Color.RED;
} else if (playerTeam == blueTeamPlayers) {
return Color.BLUE;
}
return Color.WHITE;
}
private void payPlayer(Player player) {
}
public void recordAttack(Player attacker, Player defender) {
lastAttackerMap.put(defender, attacker);
}
public Optional<Player> getLastAttacker(Player defender) {
return Optional.ofNullable(lastAttackerMap.get(defender));
}
public boolean isFriendlyFire(Player attacker, Player defender) {
Set<Player> attackerTeam = teamMapping.get(attacker);
Set<Player> defenderTeam = teamMapping.get(defender);
/* We want identity comparison to prevent expensive list comparisons */
return attackerTeam == defenderTeam && attackerTeam != freeForAllPlayers && attackerTeam != null;
}
}
| Actually fix the bow durability
| src/main/java/com/skelril/skree/content/zone/group/jungleraid/JungleRaidInstance.java | Actually fix the bow durability |
|
Java | agpl-3.0 | aabc09a05c51b1bd34780d310eb18f76cae7a2e0 | 0 | vladimirbelinski/Red-Black_Tree | import java.util.*;
class Main{
public static final int QTY = 50; //200000
public static final int SEARCH = 10; //10000
public static int d; // used in a forced remotion;
public static void main(String args[]){
Random generator = new Random();
long start, aux, uma = 0, media = 0, fim =0, total;
RBTree t = new RBTree(0);
start = System.nanoTime();
for(int i = 0; i < QTY; i++){
aux = System.nanoTime();
//t.insert(generator.nextInt(2147483647));
int j = generator.nextInt(1000);
t.insert(j);
System.out.println(j);
uma = System.nanoTime() - aux;
media += System.nanoTime() - aux;
if (i==0) { d = j;}; // used in a forced remotion;
}
//fim = System.nanoTime()-start;
media /= QTY;
//System.out.printf("Time of a single insertion: %.10f\n", uma/ 10e9);
//System.out.printf("Time of all insertions: %.10f\n", fim/ 10e9);
System.out.printf("Average insertion time: %.10f\n", media/ 10e9);
media = 0;
for(int i = 0; i < SEARCH; i++){
aux = System.nanoTime();
t.search(generator.nextInt(2147483647));
media += System.nanoTime() - aux;
}
//---- REMOTION ------
//t.remove(d); // removes a forced element; 'd' can be replaced for a number
//---- 50 NODES---
//System.out.println("------ I am using:"+ d);
//t.find50(d).graph(); // creates a new RBTree with the 50 elemens greater than a node;
t.graph();
total = System.nanoTime() - start;
media /= SEARCH;
System.out.printf("Average seek time: %.10f\n", media/ 10e9);
System.out.printf("Total time: %.10f\n", total / 10e9);
}
}
| Main.java | class Main {
public static void main(String args[]) {
Tree t = new Tree(0);
t.insert(1);
t.graph();
t = t.delete();
}
}
| Now using random, calculating time...
| Main.java | Now using random, calculating time... |
|
Java | agpl-3.0 | 88acbdef3903ee379a34eacff7ac7717b752453a | 0 | geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt2,geomajas/geomajas-project-client-gwt2,geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt | /*
* This file is part of Geomajas, a component framework for building
* rich Internet applications (RIA) with sophisticated capabilities for the
* display, analysis and management of geographic information.
* It is a building block that allows developers to add maps
* and other geographic data capabilities to their web applications.
*
* Copyright 2008-2010 Geosparc, http://www.geosparc.com, Belgium
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.geomajas.internal.security;
import com.vividsolutions.jts.geom.Geometry;
import org.geomajas.layer.feature.InternalFeature;
import org.geomajas.security.Authentication;
import org.geomajas.security.SecurityContext;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Locale;
/**
* {@link org.geomajas.security.SecurityContext} implementation.
* <p/>
* The security context is a thread scoped service which allows you to query the authorization details for the
* logged in user.
*
* @author Joachim Van der Auwera
*/
@Component
@Scope("thread")
public class SecurityContextImpl implements SecurityContext {
private List<Authentication> authentications;
// user info
private String userId;
private String userName;
private Locale userLocale;
private String userOrganization;
private String userDivision;
public SecurityContextImpl(List<Authentication> authentications) {
this.authentications = authentications;
userInfoInit();
}
/**
* @inheritDoc
*/
public List<Authentication> getSecurityServiceResults() {
return authentications;
}
/**
* @inheritDoc
*/
public String getUserId() {
return userId;
}
/**
* @inheritDoc
*/
public String getUserName() {
return userName;
}
/**
* @inheritDoc
*/
public Locale getUserLocale() {
return userLocale;
}
/**
* @inheritDoc
*/
public String getUserOrganization() {
return userOrganization;
}
/**
* @inheritDoc
*/
public String getUserDivision() {
return userDivision;
}
/**
* Calculate UserInfo strings.
*/
private void userInfoInit() {
boolean first = true;
for (Authentication auth : authentications) {
userId = combine(userName, auth.getUserId());
userName = combine(userName, auth.getUserName());
if (first) {
userLocale = auth.getUserLocale();
first = false;
} else {
if (null != auth.getUserLocale()) {
if (null == userLocale || !userLocale.equals(auth.getUserLocale())) {
userLocale = null;
}
}
}
userOrganization = combine(userOrganization, auth.getUserOrganization());
userDivision = combine(userDivision, auth.getUserDivision());
}
}
/**
* Combine user information strings.
* <p/>
* Extra information is appended (separated by a comma) if not yet present in the string.
*
* @param org base string to append to (avoiding duplication).
* @param add string to add
* @return org + ", " + add
*/
private String combine(String org, String add) {
if (null == org) {
return add;
}
if (org.equals(add) || org.startsWith(add + ", ") || org.endsWith(", " + add)) {
return org;
}
return org + ", " + add;
}
/**
* @inheritDoc
*/
public String getId() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
/**
* @inheritDoc
*/
public boolean isToolAuthorized(String toolId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
/**
* @inheritDoc
*/
public boolean isCommandAuthorized(String commandName) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isLayerVisible(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isAttributeReadable(String layerId, InternalFeature feature, String attributeName) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isFeatureVisible(String layerId, InternalFeature feature) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isLayerUpdateAuthorized(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public Geometry getVisibleArea(String layerId, CoordinateReferenceSystem crs) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public String getFeatureFilter(String layerId) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isLayerCreateAuthorized(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isPartlyVisibleSufficient(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isFeatureUpdateAuthorized(String layerId, InternalFeature feature) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isAttributeWritable(String layerId, InternalFeature feature, String attributeName) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isLayerDeleteAuthorized(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public Geometry getUpdateAuthorizedArea(String layerId, CoordinateReferenceSystem crs) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isFeatureUpdateAuthorized(String layerId, InternalFeature orgFeature, InternalFeature newFeature) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isPartlyUpdateAuthorizedSufficient(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isFeatureDeleteAuthorized(String layerId, InternalFeature feature) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isFeatureCreateAuthorized(String layerId, InternalFeature feature) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public Geometry getCreateAuthorizedArea(String layerId, CoordinateReferenceSystem crs) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isPartlyCreateAuthorizedSufficient(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public Geometry getDeleteAuthorizedArea(String layerId, CoordinateReferenceSystem crs) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isPartlyDeleteAuthorizedSufficient(String layerId) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
}
| geomajas/backend/geomajas-impl/src/main/java/org/geomajas/internal/security/SecurityContextImpl.java | /*
* This file is part of Geomajas, a component framework for building
* rich Internet applications (RIA) with sophisticated capabilities for the
* display, analysis and management of geographic information.
* It is a building block that allows developers to add maps
* and other geographic data capabilities to their web applications.
*
* Copyright 2008-2010 Geosparc, http://www.geosparc.com, Belgium
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.geomajas.internal.security;
import com.vividsolutions.jts.geom.Geometry;
import org.geomajas.layer.feature.Feature;
import org.geomajas.security.Authentication;
import org.geomajas.security.SecurityContext;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Locale;
/**
 * {@link org.geomajas.security.SecurityContext} implementation.
 * <p/>
 * The security context is a thread scoped service which allows you to query the authorization details for the
 * logged in user.
 *
 * @author Joachim Van der Auwera
 */
@Component
@Scope("thread")
public class SecurityContextImpl implements SecurityContext {

    private List<Authentication> authentications;

    // combined user info, built once in userInfoInit() from all authentication results
    private String userId;
    private String userName;
    private Locale userLocale;
    private String userOrganization;
    private String userDivision;

    public SecurityContextImpl(List<Authentication> authentications) {
        this.authentications = authentications;
        userInfoInit();
    }

    /** {@inheritDoc} */
    public List<Authentication> getSecurityServiceResults() {
        return authentications;
    }

    /** {@inheritDoc} */
    public String getUserId() {
        return userId;
    }

    /** {@inheritDoc} */
    public String getUserName() {
        return userName;
    }

    /** {@inheritDoc} */
    public Locale getUserLocale() {
        return userLocale;
    }

    /** {@inheritDoc} */
    public String getUserOrganization() {
        return userOrganization;
    }

    /** {@inheritDoc} */
    public String getUserDivision() {
        return userDivision;
    }

    /**
     * Calculate the aggregated user info strings from all authentication results.
     * <p/>
     * The locale is kept only when every authentication which specifies one agrees on it.
     */
    private void userInfoInit() {
        boolean first = true;
        for (Authentication auth : authentications) {
            // bug fix: userId was previously combined from userName instead of userId
            userId = combine(userId, auth.getUserId());
            userName = combine(userName, auth.getUserName());
            if (first) {
                userLocale = auth.getUserLocale();
                first = false;
            } else {
                if (null != auth.getUserLocale()) {
                    if (null == userLocale || !userLocale.equals(auth.getUserLocale())) {
                        userLocale = null; // conflicting locales, none can be chosen
                    }
                }
            }
            userOrganization = combine(userOrganization, auth.getUserOrganization());
            userDivision = combine(userDivision, auth.getUserDivision());
        }
    }

    /**
     * Combine user information strings.
     * <p/>
     * Extra information is appended (separated by a comma) if not yet present in the string.
     *
     * @param org base string to append to (avoiding duplication), may be null
     * @param add string to add, may be null
     * @return org + ", " + add
     */
    private String combine(String org, String add) {
        if (null == add) {
            return org; // nothing to add, avoids appending the literal "null"
        }
        if (null == org) {
            return add;
        }
        // skip the addition when it already occurs at the start, middle or end of the combined string
        if (org.equals(add) || org.startsWith(add + ", ") || org.endsWith(", " + add)
                || org.contains(", " + add + ", ")) {
            return org;
        }
        return org + ", " + add;
    }

    /** {@inheritDoc} */
    public String getId() {
        return null; // not yet implemented
    }

    /** {@inheritDoc} */
    public boolean isToolAuthorized(String toolId) {
        return false; // not yet implemented, deny by default
    }

    /** {@inheritDoc} */
    public boolean isCommandAuthorized(String commandName) {
        return false; // not yet implemented, deny by default
    }

    // The authorization methods below are not implemented yet.
    // They deny everything (false) and define no authorized areas or filters (null) by default.

    public boolean isLayerVisible(String layerId) {
        return false;
    }

    public boolean isAttributeReadable(String layerId, Feature feature, String attributeName) {
        return false;
    }

    public boolean isFeatureVisible(String layerId, Feature feature) {
        return false;
    }

    public boolean isLayerUpdateAuthorized(String layerId) {
        return false;
    }

    public Geometry getVisibleArea(String layerId, CoordinateReferenceSystem crs) {
        return null;
    }

    public String getFeatureFilter(String layerId) {
        return null;
    }

    public boolean isLayerCreateAuthorized(String layerId) {
        return false;
    }

    public boolean isPartlyVisibleSufficient(String layerId) {
        return false;
    }

    public boolean isFeatureUpdateAuthorized(String layerId, Feature feature) {
        return false;
    }

    public boolean isAttributeWritable(String layerId, Feature feature, String attributeName) {
        return false;
    }

    public boolean isLayerDeleteAuthorized(String layerId) {
        return false;
    }

    public Geometry getUpdateAuthorizedArea(String layerId, CoordinateReferenceSystem crs) {
        return null;
    }

    public boolean isFeatureUpdateAuthorized(String layerId, Feature orgFeature, Feature newFeature) {
        return false;
    }

    public boolean isPartlyUpdateAuthorizedSufficient(String layerId) {
        return false;
    }

    public boolean isFeatureDeleteAuthorized(String layerId, Feature feature) {
        return false;
    }

    public boolean isFeatureCreateAuthorized(String layerId, Feature feature) {
        return false;
    }

    public Geometry getCreateAuthorizedArea(String layerId, CoordinateReferenceSystem crs) {
        return null;
    }

    public boolean isPartlyCreateAuthorizedSufficient(String layerId) {
        return false;
    }

    public Geometry getDeleteAuthorizedArea(String layerId, CoordinateReferenceSystem crs) {
        return null;
    }

    public boolean isPartlyDeleteAuthorizedSufficient(String layerId) {
        return false;
    }
}
MAJ-643 security api works on internal features (using JTS geometries)
geomajas/backend/geomajas-impl/src/main/java/org/geomajas/internal/security/SecurityContextImpl.java | MAJ-643 security api works on internal features (using JTS geometries) |
|
Java | agpl-3.0 | ac76620c6e2dae36007965bb318fac50b41d5591 | 0 | splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine | package com.splicemachine.test;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.splicemachine.constants.SIConstants;
import com.splicemachine.constants.SpliceConstants;
import com.splicemachine.derby.hbase.*;
import com.splicemachine.derby.impl.job.coprocessor.CoprocessorTaskScheduler;
import com.splicemachine.si.coprocessors.SIObserver;
import com.splicemachine.si.coprocessors.TimestampMasterObserver;
import com.splicemachine.si.coprocessors.TxnLifecycleEndpoint;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import java.util.List;
import static com.google.common.collect.Lists.transform;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * HBase configuration for SpliceTestPlatform and SpliceTestClusterParticipant.
 */
class SpliceTestPlatformConfig {

    private static final long MiB = 1024L * 1024L;

    /** Region-server coprocessors installed for every region in the test cluster. */
    private static final List<Class<?>> REGION_COPROCESSORS = ImmutableList.<Class<?>>of(
            SpliceOperationRegionObserver.class,
            SpliceIndexObserver.class,
            SpliceDerbyCoprocessor.class,
            SpliceIndexManagementEndpoint.class,
            SpliceIndexEndpoint.class,
            CoprocessorTaskScheduler.class,
            TxnLifecycleEndpoint.class,
            SIObserver.class);

    /** Coprocessors installed on the HBase master. */
    private static final List<Class<?>> MASTER_COPROCESSORS = ImmutableList.<Class<?>>of(
            SpliceMasterObserver.class,
            TimestampMasterObserver.class);

    private static final Function<Class, String> CLASS_NAME_FUNC = new Function<Class, String>() {
        @Override
        public String apply(Class input) {
            return input.getCanonicalName();
        }
    };

    /*
     * Create an HBase config object suitable for use in our test platform.
     * Each configuration concern is delegated to a small helper below.
     */
    public static Configuration create(String hbaseRootDirUri,
                                       Integer masterPort,
                                       Integer masterInfoPort,
                                       Integer regionServerPort,
                                       Integer regionServerInfoPort,
                                       Integer derbyPort,
                                       boolean failTasksRandomly) {
        Configuration config = HBaseConfiguration.create();
        configureCoprocessors(config);
        configureNetworking(config, masterPort, masterInfoPort, regionServerPort, regionServerInfoPort, derbyPort);
        configureFileSystem(config, hbaseRootDirUri);
        configureThreadsAndTimeouts(config);
        configureCompaction(config);
        configureMemstoreAndSplits(config);
        configureHFile(config);
        configureMisc(config);
        configureSplice(config, failTasksRandomly);
        config.reloadConfiguration();
        SIConstants.reloadConfiguration(config);
        return config;
    }

    /** Installs the region and master coprocessor lists. */
    private static void configureCoprocessors(Configuration config) {
        config.set("hbase.coprocessor.region.classes", getRegionCoprocessorsAsString());
        config.set("hbase.coprocessor.master.classes", getMasterCoprocessorsAsString());
    }

    /** Ports, zookeeper quorum, JMX, and network interfaces. */
    private static void configureNetworking(Configuration config,
                                            Integer masterPort,
                                            Integer masterInfoPort,
                                            Integer regionServerPort,
                                            Integer regionServerInfoPort,
                                            Integer derbyPort) {
        config.set("hbase.zookeeper.quorum", "127.0.0.1:2181");
        config.setInt("hbase.master.port", masterPort);
        config.setInt("hbase.master.info.port", masterInfoPort);
        config.setInt("hbase.regionserver.port", regionServerPort);
        config.setInt("hbase.regionserver.info.port", regionServerInfoPort);
        config.setInt("hbase.master.jmx.port", 10102); // this is set because the HBase master and regionserver are running on the same machine and in the same JVM
        config.setInt(SpliceConstants.DERBY_BIND_PORT, derbyPort);
        // force use of loop back interface on MacOSX, else don't set it
        if (System.getProperty("os.name").contains("Mac")) {
            String interfaceName = "lo0";
            config.set("hbase.zookeeper.dns.interface", interfaceName);
            config.set("hbase.master.dns.interface", interfaceName);
            config.set("hbase.regionserver.dns.interface", interfaceName);
        }
    }

    /** Root directory and default filesystem. */
    private static void configureFileSystem(Configuration config, String hbaseRootDirUri) {
        config.set("fs.default.name", "file:///"); // MapR Hack, tells it local filesystem
        // Must allow Cygwin instance to config its own rootURI
        if (!"CYGWIN".equals(hbaseRootDirUri)) {
            config.set("hbase.rootdir", hbaseRootDirUri);
        }
    }

    /** RPC/lease timeouts, handler counts and wake frequencies. */
    private static void configureThreadsAndTimeouts(Configuration config) {
        config.setLong("hbase.rpc.timeout", MINUTES.toMillis(2));
        config.setLong("hbase.regionserver.lease.period", MINUTES.toMillis(2));
        config.setLong("hbase.regionserver.handler.count", 200);
        config.setLong("hbase.regionserver.msginterval", 1000);
        config.setLong("hbase.master.event.waiting.time", 20);
        config.setLong("hbase.master.lease.thread.wakefrequency", SECONDS.toMillis(3));
        config.setLong("hbase.server.thread.wakefrequency", SECONDS.toMillis(1));
        config.setLong("hbase.client.pause", 100);
    }

    /** Compaction selection thresholds. */
    private static void configureCompaction(Configuration config) {
        config.setLong("hbase.hstore.compaction.min", 5); // min number of eligible files before we compact
        config.setLong("hbase.hstore.compaction.max", 10); // max files to be selected for a single minor compaction
        config.setLong("hbase.hstore.compaction.min.size", 16 * MiB); // store files smaller than this will always be eligible for minor compaction. HFiles this size or larger are evaluated by hbase.hstore.compaction.ratio to determine if they are eligible
        config.setLong("hbase.hstore.compaction.max.size", 248 * MiB); // store files larger than this will be excluded from compaction
        config.setFloat("hbase.hstore.compaction.ratio", 1.25f); // default is 1.2f, at one point we had this set to 0.25f and 25f (which was likely a typo)
    }

    /** Memstore sizing, store file limits and the region split policy. */
    private static void configureMemstoreAndSplits(Configuration config) {
        config.setLong(HConstants.HREGION_MAX_FILESIZE, 1024 * MiB); // hbase.hregion.max.filesize
        config.setLong("hbase.hregion.memstore.flush.size", 128 * MiB); // was 512 MiB
        config.setLong("hbase.hregion.memstore.block.multiplier", 4);
        config.setFloat("hbase.regionserver.global.memstore.size", 0.25f); // set mem store to 25% of heap
        config.setLong("hbase.hstore.blockingStoreFiles", 20);
        config.set("hbase.regionserver.region.split.policy", "org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy"); // change default split policy. this makes more sense for a standalone/single regionserver
    }

    /** HFile block index, block cache and bloom filter settings. */
    private static void configureHFile(Configuration config) {
        config.setInt("hfile.index.block.max.size", 16 * 1024); // 16KiB
        config.setFloat("hfile.block.cache.size", 0.25f); // set block cache to 25% of heap
        config.setFloat("io.hfile.bloom.error.rate", (float) 0.005);
        config.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true); // hfile.block.bloom.cacheonwrite
    }

    /** Remaining cluster-mode flags. */
    private static void configureMisc(Configuration config) {
        config.set("hbase.cluster.distributed", "true"); // don't start zookeeper for us
        config.set("hbase.master.distributed.log.splitting", "false"); // TODO: explain why we are setting this
    }

    /** Splice-specific test settings. */
    private static void configureSplice(Configuration config, boolean failTasksRandomly) {
        config.setLong("splice.ddl.drainingWait.maximum", SECONDS.toMillis(15)); // wait 15 seconds before bailing on bad ddl statements
        config.setDouble(SpliceConstants.DEBUG_TASK_FAILURE_RATE, 0.05d);
        config.setBoolean(SpliceConstants.DEBUG_FAIL_TASKS_RANDOMLY, failTasksRandomly);
    }

    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    private static String getRegionCoprocessorsAsString() {
        return Joiner.on(",").join(transform(REGION_COPROCESSORS, CLASS_NAME_FUNC));
    }

    private static String getMasterCoprocessorsAsString() {
        return Joiner.on(",").join(transform(MASTER_COPROCESSORS, CLASS_NAME_FUNC));
    }
}
| splice_machine_test/src/main/java/com/splicemachine/test/SpliceTestPlatformConfig.java | package com.splicemachine.test;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.splicemachine.constants.SIConstants;
import com.splicemachine.constants.SpliceConstants;
import com.splicemachine.derby.hbase.*;
import com.splicemachine.derby.impl.job.coprocessor.CoprocessorTaskScheduler;
import com.splicemachine.si.coprocessors.SIObserver;
import com.splicemachine.si.coprocessors.TimestampMasterObserver;
import com.splicemachine.si.coprocessors.TxnLifecycleEndpoint;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import java.util.List;
import static com.google.common.collect.Lists.transform;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * HBase configuration for SpliceTestPlatform and SpliceTestClusterParticipant.
 */
class SpliceTestPlatformConfig {

    // Region-server coprocessors installed for every region in the test cluster.
    private static final List<Class<?>> REGION_COPROCESSORS = ImmutableList.<Class<?>>of(
            SpliceOperationRegionObserver.class,
            SpliceIndexObserver.class,
            SpliceDerbyCoprocessor.class,
            SpliceIndexManagementEndpoint.class,
            SpliceIndexEndpoint.class,
            CoprocessorTaskScheduler.class,
            TxnLifecycleEndpoint.class,
            SIObserver.class);

    // Coprocessors installed on the HBase master.
    private static final List<Class<?>> MASTER_COPROCESSORS = ImmutableList.<Class<?>>of(
            SpliceMasterObserver.class,
            TimestampMasterObserver.class);

    /*
     * Create an HBase config object suitable for use in our test platform.
     */
    public static Configuration create(String hbaseRootDirUri,
                                       Integer masterPort,
                                       Integer masterInfoPort,
                                       Integer regionServerPort,
                                       Integer regionServerInfoPort,
                                       Integer derbyPort,
                                       boolean failTasksRandomly) {
        Configuration config = HBaseConfiguration.create();
        //
        // Coprocessors
        //
        config.set("hbase.coprocessor.region.classes", getRegionCoprocessorsAsString());
        config.set("hbase.coprocessor.master.classes", getMasterCoprocessorsAsString());
        //
        // Networking
        //
        config.set("hbase.zookeeper.quorum", "127.0.0.1:2181");
        config.setInt("hbase.master.port", masterPort);
        config.setInt("hbase.master.info.port", masterInfoPort);
        config.setInt("hbase.regionserver.port", regionServerPort);
        config.setInt("hbase.regionserver.info.port", regionServerInfoPort);
        // master and regionserver run in the same JVM here, so the master JMX port is pinned explicitly
        config.setInt("hbase.master.jmx.port", 10102);
        config.setInt(SpliceConstants.DERBY_BIND_PORT, derbyPort);
        //
        // Networking -- interfaces
        //
        // On MacOSX force the loopback interface; elsewhere fall back to the "default" interface name
        String interfaceName = System.getProperty("os.name").contains("Mac") ? "lo0" : "default";
        config.set("hbase.zookeeper.dns.interface", interfaceName);
        config.set("hbase.regionserver.dns.interface", interfaceName);
        config.set("hbase.master.dns.interface", interfaceName);
        //
        // File System
        //
        // MapR Hack, tells it local filesystem
        config.set("fs.default.name", "file:///");
        // Must allow Cygwin instance to config its own rootURI
        if (!"CYGWIN".equals(hbaseRootDirUri)) {
            config.set("hbase.rootdir", hbaseRootDirUri);
        }
        //
        // Threads, timeouts
        //
        config.setLong("hbase.rpc.timeout", MINUTES.toMillis(2));
        config.setLong("hbase.regionserver.lease.period", MINUTES.toMillis(2));
        config.setLong("hbase.regionserver.handler.count", 200);
        config.setLong("hbase.regionserver.msginterval", 1000);
        config.setLong("hbase.master.event.waiting.time", 20);
        config.setLong("hbase.master.lease.thread.wakefrequency", SECONDS.toMillis(3));
        config.setLong("hbase.server.thread.wakefrequency", SECONDS.toMillis(1));
        config.setLong("hbase.client.pause", 100);
        //
        // Compaction Controls
        //
        config.setLong("hbase.hstore.compaction.min", 5); // Minimum Number of Files for Compaction
        config.setLong("hbase.hstore.compaction.max", 10); // max files selected for one minor compaction
        config.setLong("hbase.hstore.compaction.min.size", 16 * MiB); // files below this size are always eligible
        config.setLong("hbase.hstore.compaction.max.size", 248 * MiB); // files above this size are excluded
        //
        // Memstore, store files, splits
        //
        config.setLong("hbase.hregion.max.filesize", 1024 * MiB);
        // NOTE(review): flagged as too high at the time; later revisions reduced this to 128 MiB
        config.setLong("hbase.hregion.memstore.flush.size", 512 * MiB); // Way too high
        config.setLong("hbase.hregion.memstore.block.multiplier", 4);
        config.setFloat("hbase.regionserver.global.memstore.size", 0.25f); // 25% of heap for memstores
        config.setLong("hbase.hstore.blockingStoreFiles", 20);
        // constant-size split policy suits a standalone/single-regionserver setup
        config.set("hbase.regionserver.region.split.policy", "org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy");
        //
        // HFile
        //
        config.setInt("hfile.index.block.max.size", 16 * 1024); // 16 KiB index blocks (rationale not documented here)
        config.setFloat("hfile.block.cache.size", 0.25f); // 25% of heap for the block cache
        config.setFloat("io.hfile.bloom.error.rate", (float) 0.005);
        //
        // Misc
        //
        config.set("hbase.cluster.distributed", "true"); // zookeeper is managed externally
        // NOTE(review): reason for disabling distributed log splitting is not documented here -- confirm
        config.set("hbase.master.distributed.log.splitting", "false"); // Why?
        config.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);
        //
        // Splice
        //
        //wait 15 seconds before bailing on bad ddl statements
        config.setLong("splice.ddl.drainingWait.maximum", SECONDS.toMillis(15));
        config.setDouble(SpliceConstants.DEBUG_TASK_FAILURE_RATE, 0.05d);
        config.setBoolean(SpliceConstants.DEBUG_FAIL_TASKS_RANDOMLY, failTasksRandomly);
        config.reloadConfiguration();
        SIConstants.reloadConfiguration(config);
        return config;
    }

    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    private static final long MiB = 1024L * 1024L;

    // Maps a Class to its canonical name, used to build the comma-separated coprocessor lists.
    private static final Function<Class, String> CLASS_NAME_FUNC = new Function<Class, String>() {
        @Override
        public String apply(Class input) {
            return input.getCanonicalName();
        }
    };

    private static String getRegionCoprocessorsAsString() {
        return Joiner.on(",").join(transform(REGION_COPROCESSORS, CLASS_NAME_FUNC));
    }

    private static String getMasterCoprocessorsAsString() {
        return Joiner.on(",").join(transform(MASTER_COPROCESSORS, CLASS_NAME_FUNC));
    }
}
| DB-2418 - adding comments to cleaned up configuration, changing a parameter and adding one
| splice_machine_test/src/main/java/com/splicemachine/test/SpliceTestPlatformConfig.java | DB-2418 - adding comments to cleaned up configuration, changing a parameter and adding one |
|
Java | lgpl-2.1 | 2077d37ed9432b29847579e1aaa203c973eebf53 | 0 | SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ch.unizh.ini.jaer.projects.virtualslotcar;
import com.sun.opengl.util.GLUT;
import com.sun.opengl.util.j2d.TextRenderer;
import java.awt.Color;
import java.awt.Font;
import java.awt.Point;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import javax.media.opengl.GLCanvas;
import javax.media.opengl.GLException;
import net.sf.jaer.graphics.ChipCanvas;
import net.sf.jaer.graphics.MultilineAnnotationTextRenderer;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Random;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.glu.GLU;
import javax.media.opengl.glu.GLUquadric;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.eventprocessing.FilterChain;
import net.sf.jaer.graphics.FrameAnnotater;
import net.sf.jaer.util.StateMachineStates;
/**
* Learns the throttle at different part of the track.
* <p>
* After a discussion with Garrick and Tobias Glassmachers we decided to go
* for coding up a learning approach that saves successful
* ThrottleSetting profiles if the car makes it around the track twice, and then randomly perturbs the
* throttleValues to increase the throttle smoothly somewhere along the track. If the change causes a
* crash, we go back to the saved throttleValues and perturb again, using a random bump
* of throttle increase somewhere on the track. This approach will guarantee increase in
* speed and will always eventually cause a crash but we can add a button to go back to the last
* successful throttleValues. The track model is the basis of this because it tells us where we are.
*
* @author Juston, Tobi
*/
public class EvolutionaryThrottleController extends AbstractSlotCarController implements SlotCarControllerInterface, FrameAnnotater, MouseListener, MouseMotionListener, PropertyChangeListener {
public static String getDescription() {
    // Short description shown by the jAER filter selection UI.
    final String description = "Evolution-based slot car throttle controller";
    return description;
}
// prefs -- all values are persisted via the filter preference helpers (getInt/getFloat/getBoolean)
// number of track segments braked just before a remembered crash location
private int numSegmentsToBrakeBeforeCrash=getInt("numSegmentsToBrakeBeforeCrash",2);
// number of segments before the crash point where the braking region starts
private int numSegmentsSpacingFromCrashToBrakingPoint=getInt("numSegmentsSpacingFromCrashToBrakingPoint",4);
// fraction of track spline points sped up after successful laps
private float fractionOfTrackToSpeedUp = getFloat("fractionOfTrackToSpeedUp", 0.3f);
// fraction of track spline points slowed down before a crash point
private float fractionOfTrackToSlowDownPreCrash = getFloat("fractionOfTrackToSlowDownPreCrash", .15f);
private float defaultThrottleValue = getFloat("defaultThrottle", .1f); // default throttle setting if no car is detected
private ThrottleBrake defaultThrottle=new ThrottleBrake(defaultThrottleValue, false);
// when true, the profile evolves: rewards after laps, reversion after crashes
private boolean learningEnabled = getBoolean("learningEnabled", false);
// maximum random throttle perturbation applied by the evolution step
private float throttleChange = getFloat("throttleChange", 0.03f);
// throttle increment used for manual mouse edits of the profile
private float editThrottleChange = getFloat("editThrottleChange", 0.2f);
// number of crash-free laps required before the profile is rewarded
private int numSuccessfulLapsToReward = getInt("numSuccessfulLapsToReward", 2);
// throttle used while no car cluster has been detected yet
private float startingThrottleValue = getFloat("startingThrottleValue", .1f);
// when true, the profile is drawn over the track display
private boolean showThrottleProfile = getBoolean("showThrottleProfile", true);
// GLUT helper for OpenGL annotation drawing
private GLUT glut=new GLUT();
/** Possible controller states:
 * <ol>
 * <li> OVERRIDDEN means the throttle is not being computed by this controller (external override),
 * <li> STARTING means no car is tracked or tracker has not found a car cluster near the track model,
 * <li> RUNNING is the active state,
 * <li> CRASHED is the state if we were RUNNING and the car tracker has tracked the car
 * sufficiently far away from the track model.
 * </ol>
 * NOTE(review): an earlier comment also described a STALLED state (car stops being tracked while still
 * on the track), but no such constant exists in this enum -- confirm whether it was removed intentionally.
 */
public enum State {

    OVERRIDDEN, STARTING, RUNNING, CRASHED
}
/** State machine holder for this controller; the initial state is STARTING. */
protected class RacerState extends StateMachineStates {

    // current state value; transitions are managed by the StateMachineStates base class
    State state = State.STARTING;

    @Override
    public Enum getInitial() {
        return State.STARTING;
    }
}
private RacerState state = new RacerState();
// vars
private ThrottleBrake throttle = new ThrottleBrake(); // last output throttle setting
private int currentTrackPos; // position in spline parameter of track
private int lastRewardLap = 0; // lap counter value at the last profile reward
// currentProfile is the live profile; the other two are saved fallbacks after successful laps
private ThrottleProfile currentProfile, lastSuccessfulProfile, lastSuccessfulProfileEvenOlder;
private Random random = new Random();
LapTimer lapTimer = null; // measures lap times; listens for track changes
private int lapTime; // most recent lap time in us
private int prevLapTime; // previous lap time in us, for improvement/worsening logging
private TrackdefineFilter trackDefineFilter; // enclosed filter providing the track model
private FilterChain filterChain; // enclosed chain: trackDefineFilter + carTracker
private TwoCarTracker carTracker; // enclosed filter tracking the car clusters
private TwoCarTracker.TwoCarCluster car = null; // the currently controlled car cluster, may be null
private boolean showedMissingTrackWarning = false; // so the missing-track warning is logged only once
private SlotcarSoundEffects sounds = null; // crash sound; stays null if initialization fails
private int lastCrashLocation = -1; // track segment index of the last crash, -1 if none yet
private GLCanvas glCanvas; // GL canvas of the chip display, if available
private ChipCanvas canvas;
/**
 * Constructs the controller: registers property tooltips, loads any saved throttle profile,
 * builds the enclosed filter chain (track definition filter + two-car tracker), wires
 * track-change listeners, and initializes sound effects and the GL canvas reference.
 *
 * @param chip the AEChip whose events this filter processes
 */
public EvolutionaryThrottleController(AEChip chip) {
    super(chip);
    final String s = "EvolutionaryThrottleController";
    setPropertyTooltip(s, "defaultThrottle", "default throttle setting if no car is detected; also starting throttle after resetting learning and minimum allowed throttle");
    setPropertyTooltip(s, "fractionOfTrackToPunish", "fraction of track to reduce throttle and mark for no reward");
    setPropertyTooltip(s, "learningEnabled", "enable evolution - successful profiles are sped up, crashes cause reversion to last successful profile");
    setPropertyTooltip(s, "throttleChange", "max amount to increase throttle for perturbation");
    setPropertyTooltip(s, "editThrottleChange", "amount to change throttle for mouse edits of the throttle profile");
    setPropertyTooltip(s, "numSuccessfulLapsToReward", "number of successful (no crash) laps between rewards");
    setPropertyTooltip(s, "numSegmentsToBrakeBeforeCrash", "number track segments to brake for just prior to crash location");
    setPropertyTooltip(s, "numSegmentsSpacingFromCrashToBrakingPoint", "number track segments before crash that braking segments start");
    setPropertyTooltip(s, "fractionOfTrackToSpeedUp", "fraction of track spline points to increase throttle on after successful laps");
    setPropertyTooltip(s, "fractionOfTrackToSlowDownPreCrash", "fraction of track spline points before crash point to reduce throttle on");
    setPropertyTooltip(s, "startingThrottleValue", "throttle value when starting (no car cluster detected)");
    // fixed typo in user-visible tooltip: "reprenting" -> "representing"
    setPropertyTooltip(s, "showThrottleProfile", "displays the throttle profile, with dot size representing the throttle value");
    // do methods
    setPropertyTooltip(s, "guessThrottleFromTrackModel", "guess initial throttle profile from track model");
    setPropertyTooltip(s, "resetAllThrottleValues", "reset all profile points to defaultThrottle");
    setPropertyTooltip(s, "loadThrottleSettings", "load profile from preferences");
    setPropertyTooltip(s, "saveThrottleSettings", "save profile to preferences");
    setPropertyTooltip(s, "revertToLastSuccessfulProfile", "explicitly revert profile to last one that made it around the track at least numSuccessfulLapsToReward");
    setPropertyTooltip(s, "slowDown", "reduce all profile point throttle settings");
    setPropertyTooltip(s, "speedUp", "increase all profile point throttle settings");
    doLoadThrottleSettings();
    // build the enclosed processing chain: track model extraction, then car tracking
    filterChain = new FilterChain(chip);
    trackDefineFilter = new TrackdefineFilter(chip);
    trackDefineFilter.setEnclosed(true, this);
    carTracker = new TwoCarTracker(chip);
    carTracker.setTrack(trackDefineFilter.getTrack());
    carTracker.setEnclosed(true, this);
    carTracker.addObserver(trackDefineFilter); // so that track define filter can getString the tracker output
    filterChain.add(trackDefineFilter);
    filterChain.add(carTracker);
    // notify this controller, the tracker and the lap timer whenever the track model changes
    trackDefineFilter.getSupport().addPropertyChangeListener(SlotcarTrack.EVENT_TRACK_CHANGED, this);
    trackDefineFilter.getSupport().addPropertyChangeListener(SlotcarTrack.EVENT_TRACK_CHANGED, carTracker);
    lapTimer = new LapTimer(getTrack());
    trackDefineFilter.getSupport().addPropertyChangeListener(SlotcarTrack.EVENT_TRACK_CHANGED, lapTimer);
    setEnclosedFilterChain(filterChain);
    try {
        sounds = new SlotcarSoundEffects(0);
    } catch (Exception ex) {
        // sound is optional; 'sounds' stays null and callers must check for that
        log.warning("No sound effects available: " + ex.toString());
    }
    if (chip.getCanvas() != null && chip.getCanvas().getCanvas() != null) {
        glCanvas = (GLCanvas) chip.getCanvas().getCanvas();
    }
}
/**
 * Runs the enclosed tracker/track filters on the packet, then advances the controller state
 * machine (STARTING/RUNNING/CRASHED/OVERRIDDEN) and updates the output throttle from the
 * current {@link ThrottleProfile}. While learning is enabled, enough crash-free laps reward
 * the profile with a throttle bump, and a crash reverts to the last successful profile and
 * adds braking/slow-down around the crash segment.
 *
 * @param in the input event packet
 * @return the packet produced by the enclosed filter chain
 */
@Override
public EventPacket<?> filterPacket(EventPacket<?> in) {
    // (re)create the throttle profile whenever the track exists but the profile is missing or stale
    if (trackDefineFilter.getTrack() != null && (currentProfile == null || currentProfile.getNumPoints() != getTrack().getNumPoints())) {
        currentProfile = new ThrottleProfile(getTrack().getNumPoints());
        log.info("made a new ThrottleProfile :" + currentProfile);
    }
    out = getEnclosedFilterChain().filterPacket(in); // does cartracker and maybe trackdefinefilter
    car = carTracker.findCarCluster();
    if (car != null) {
        currentTrackPos = car.segmentIdx;
    }
    // choose state & set throttle
    if (state.get() == State.OVERRIDDEN) {
        // throttle is controlled externally; nothing to compute
    } else if (state.get() == State.STARTING) {
        if (car != null && car.isRunning()) {
            state.set(State.RUNNING);
        }
    } else if (state.get() == State.RUNNING) {
        if (trackDefineFilter.getTrack() == null) {
            if (!showedMissingTrackWarning) {
                log.warning("Track not defined yet. Use the TrackdefineFilter to extract the slot car track or load the track from a file.");
            }
            showedMissingTrackWarning = true;
        } else {
            if (car != null && !car.crashed) {
                // did we lap?
                boolean lapped = lapTimer.update(currentTrackPos, car.getLastEventTimestamp());
                if (lapped) {
                    lapTime = lapTimer.getLastLap().laptimeUs;
                    int dt = lapTime - prevLapTime;
                    if (dt < 0) {
                        log.info("lap time improved by " + dt / 1000 + " ms");
                    } else if (dt > 0) {
                        log.info("lap time worsened by " + dt / 1000 + " ms");
                    }
                    prevLapTime = lapTime;
                }
                if (learningEnabled && lapTimer.lapCounter - lastRewardLap > numSuccessfulLapsToReward) {
                    try {
                        log.info("successfully drove " + lapTimer.lapCounter + " laps; cloning this profile and rewarding currentProfile");
                        if (lastSuccessfulProfile != null) {
                            lastSuccessfulProfileEvenOlder = (ThrottleProfile) lastSuccessfulProfile.clone(); // save backup copy of last successfull
                        }
                        if (currentProfile != null) {
                            lastSuccessfulProfile = (ThrottleProfile) currentProfile.clone(); // save current as successful
                        }
                    } catch (CloneNotSupportedException e) {
                        throw new RuntimeException("couldn't clone the current throttle profile: " + e);
                    }
                    currentProfile.addBump();
                    lastRewardLap = lapTimer.lapCounter;
                }
            }
            if (carTracker.getCrashedCar() != null) {
                state.set(State.CRASHED);
                // bug fix: use the crashed car reported by the tracker; 'car' can be null here
                lastCrashLocation = carTracker.getCrashedCar().crashSegment;
                if (sounds != null) { // bug fix: sounds is null when sound effects failed to initialize
                    sounds.play();
                }
                if (learningEnabled) {
                    if (lastSuccessfulProfile != null && currentProfile != lastSuccessfulProfile) {
                        log.info("crashed at segment" + lastCrashLocation + ", switching back to previous profile");
                        currentProfile = lastSuccessfulProfile;
                    }
                    if (numSegmentsToBrakeBeforeCrash > 0) {
                        currentProfile.addBrake(carTracker.getCrashedCar().crashSegment);
                    } else {
                        currentProfile.subtractBump(carTracker.getCrashedCar().crashSegment);
                    }
                }
                lastRewardLap = lapTimer.lapCounter; // don't reward until we make some laps from here
            } else if (car != null) { // bug fix: guard against NPE when no car cluster is found
                throttle = currentProfile.getThrottle(car.segmentIdx);
            }
        }
    } else if (state.get() == State.CRASHED) {
        state.set(State.STARTING);
    }
    setBigStatusText(state.toString(), Color.RED);
    return out;
}
// big status overlay: renderer is created lazily in renderBigStatusText()
private TextRenderer statusRenderer = null;
private Color bigStatusColor = Color.WHITE; // color of the big status text
private String bigStatusText = null; // text to draw, null when nothing should be shown
/** Sets the large status text drawn centered on screen by {@link #renderBigStatusText}.
 *
 * @param s the text to show; null disables rendering
 * @param c the color to draw the text with
 */
synchronized private void setBigStatusText(String s, Color c) {
    bigStatusText = s;
    bigStatusColor = c;
}
/** Renders the big status text centered in the drawable; no-op if no text is set.
 * The TextRenderer is created lazily on first use.
 */
synchronized private void renderBigStatusText(GLAutoDrawable drawable) {
    if (bigStatusText == null) {
        return; // nothing to draw
    }
    if (statusRenderer == null) {
        statusRenderer = new TextRenderer(new Font("Serif", Font.BOLD, 60)); // lazy init
    }
    statusRenderer.setColor(bigStatusColor);
    Rectangle2D bounds = statusRenderer.getBounds(bigStatusText);
    statusRenderer.beginRendering(drawable.getWidth(), drawable.getHeight());
    // center the string horizontally and vertically in the drawable
    statusRenderer.draw(bigStatusText, (int) (drawable.getWidth() / 2 - bounds.getWidth() / 2), (int) (drawable.getHeight() / 2 - bounds.getHeight() / 2));
    statusRenderer.endRendering();
}
/** Returns the throttle computed from the tracker output and the ThrottleProfile.
 * Note: the throttle field itself is updated elsewhere (presumably in filterPacket);
 * this method only returns the most recently computed setting and ignores its arguments.
 *
 * @param tracker the car tracker (unused here)
 * @param track the track model (unused here)
 * @return the throttle from 0-1.
 */
synchronized public ThrottleBrake computeControl(CarTracker tracker, SlotcarTrack track) {
    return throttle;
}
/** GUI action: resets every throttle profile point to the default throttle and clears
 * all brake points and learning markers. No-op (with warning) if no profile exists yet.
 */
synchronized public void doResetAllThrottleValues() {
    if (currentProfile == null) {
        log.warning("cannot reset until profile exists");
        return;
    }
    currentProfile.reset();
}
/** GUI action: initializes the throttle profile from the track model's curvature
 * (high curvature gets lower throttle). No-op (with warning) if no profile exists yet.
 */
synchronized public void doGuessThrottleFromTrackModel() {
    if (currentProfile == null) {
        log.warning("cannot guess until profile exists");
        return;
    }
    currentProfile.guessThrottleFromTrackModel();
}
/** GUI action: serializes the current throttle profile (point count followed by the
 * ThrottleBrake array) into the Java Preferences under a fixed key.
 * Failures are logged as warnings, never thrown.
 */
synchronized public void doSaveThrottleSettings() {
    if (currentProfile == null) {
        log.warning("no profile to save");
        return;
    }
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bos);
        oos.writeObject(currentProfile.numPoints); // autoboxed to Integer; read back first by doLoadThrottleSettings
        oos.writeObject(currentProfile.throttleValues);
        // close (and thereby flush) the object stream BEFORE snapshotting the byte array;
        // previously toByteArray() was taken first, which could miss data still buffered
        // inside the ObjectOutputStream
        oos.close();
        bos.close();
        prefs().putByteArray("EvolutionaryThrottleController.throttleProfile", bos.toByteArray());
        log.info("throttle settings saved to preferences"); // fixed typo "saveed"
    } catch (Exception e) {
        log.warning("couldn't save profile: " + e);
    }
}
/** GUI action: restores the throttle profile previously stored by
 * {@link #doSaveThrottleSettings} from the Java Preferences. No-op (with info log)
 * if nothing was saved; failures are logged as warnings, never thrown.
 */
public final synchronized void doLoadThrottleSettings() {
    try {
        byte[] b = prefs().getByteArray("EvolutionaryThrottleController.throttleProfile", null);
        if (b == null) {
            log.info("no throttle settings saved in preferences, can't load them");
            return;
        }
        ByteArrayInputStream bis = new ByteArrayInputStream(b);
        ObjectInputStream ois = new ObjectInputStream(bis);
        Object o = ois.readObject();
        if (o == null) {
            throw new NullPointerException("Couldn't read Integer number of throttle points from preferences");
        }
        int n = ((Integer) o).intValue(); // stored point count; used below as a sanity check
        o = ois.readObject();
        if (o == null) {
            throw new NullPointerException("Couldn't read float array of throttle points from preferences");
        }
        ThrottleBrake[] f = (ThrottleBrake[]) o;
        if (f.length != n) {
            // stored count disagrees with stored array; trust the array but flag the inconsistency
            log.warning("stored point count " + n + " does not match stored array length " + f.length);
        }
        currentProfile = new ThrottleProfile(f);
        ois.close();
        bis.close();
        log.info("loaded throttle profile from preferences: " + currentProfile); // fixed typo "preferencdes"
    } catch (Exception e) {
        log.warning("couldn't load throttle profile: " + e);
    }
}
/** GUI action: uniformly reduces all throttle profile settings (see ThrottleProfile.slowDown). */
synchronized public void doSlowDown() {
    if (currentProfile != null) {
        currentProfile.slowDown();
        log.info("slowed down current profile to " + currentProfile);
    }
}
/** GUI action: uniformly increases all throttle profile settings (see ThrottleProfile.speedUp). */
synchronized public void doSpeedUp() {
    if (currentProfile != null) {
        currentProfile.speedUp();
        log.info("speeded up current profile to " + currentProfile);
    }
}
/** GUI action: reverts the current profile to the two-generations-old successful profile
 * (the backup saved before the most recent successful one).
 */
synchronized public void doRevertToLastSuccessfulProfile() {
    if (lastSuccessfulProfileEvenOlder != null) {
        currentProfile = lastSuccessfulProfileEvenOlder;
        log.info("reverted to " + lastSuccessfulProfileEvenOlder);
    } else {
        log.info("cannot revert - no lastSuccessfulProfileEvenOlder stored yet");
    }
}
/** Clamps a throttle value to the legal range [defaultThrottleValue, 1].
 *
 * @param t the raw throttle value
 * @return t limited to at most 1 and at least defaultThrottleValue
 */
private float clipThrottle(float t) {
    if (t > 1) {
        return 1;
    }
    if (t < defaultThrottleValue) {
        return defaultThrottleValue;
    }
    return t;
}
// reusable ThrottleBrake returned in non-RUNNING states, avoiding a per-call allocation
final ThrottleBrake startingThrottle=new ThrottleBrake(startingThrottleValue,false);

/** Returns the throttle/brake appropriate for the current state machine state:
 * the profile throttle when RUNNING, the starting throttle when STARTING or CRASHED,
 * and the default throttle when OVERRIDDEN.
 *
 * @return the ThrottleBrake to apply
 */
public ThrottleBrake getThrottle() {
    Enum s = state.get();
    if (s == State.RUNNING) {
        return throttle;
    } else if (s == State.CRASHED || s == State.STARTING) {
        startingThrottle.throttle = startingThrottleValue; // refresh in case the property changed
        return startingThrottle;
    } else if (s == State.OVERRIDDEN) {
        startingThrottle.throttle = defaultThrottleValue;
        return startingThrottle;
    } else {
        throw new Error("state not found for RacerState, shouldn't happen");
    }
}
/** Returns one tab-separated log line matching the columns named by {@link #logContents}. */
@Override
public String logControllerState() {
    // Fix: %f cannot format a ThrottleBrake object (would throw
    // IllegalFormatConversionException at runtime); log the float throttle field,
    // consistent with the formatting in annotate().
    return String.format("%s\t%d\t%f\t%s", state, currentTrackPos, throttle.throttle, car);
}

/** Returns the column headers for {@link #logControllerState} output. */
@Override
public String logContents() {
    return "state currentTrackPos throttle car ";
}
/** Resets the controller: back to STARTING state, lap timer cleared, enclosed filters
 * reset, crash location forgotten, profile learning markers cleared, and the reward
 * lap counter rewound.
 */
@Override
public void resetFilter() {
    // Fix: state was set to STARTING twice (at the top and bottom of this method);
    // the duplicate has been removed.
    state.set(State.STARTING);
    lapTimer.reset();
    getEnclosedFilterChain().reset();
    lastCrashLocation = -1;
    if (currentProfile != null) {
        currentProfile.resetMarkedSegments();
    }
    lastRewardLap = 0;
}
/** No per-filter initialization needed beyond what the constructor does. */
@Override
public void initFilter() {
}
/** Enables/disables this filter; the enclosed track-define filter is always left disabled. */
@Override
public synchronized void setFilterEnabled(boolean yes) {
    super.setFilterEnabled(yes);
    trackDefineFilter.setFilterEnabled(false); // don't enable by default
}
/** Reacts to track-model changes: if the new track has a different number of spline
 * points than the current throttle profile (or no profile exists), a fresh default
 * profile of the right size is created.
 */
@Override
synchronized public void propertyChange(PropertyChangeEvent evt) {
    // Fix: compare the property name with equals() instead of reference equality (==);
    // == only works if both strings happen to be the same interned instance.
    if (SlotcarTrack.EVENT_TRACK_CHANGED.equals(evt.getPropertyName())) {
        SlotcarTrack track = (SlotcarTrack) evt.getNewValue();
        if (currentProfile == null || track.getNumPoints() != currentProfile.getNumPoints()) {
            log.warning("new track has different number of points than current throttle profile, making a new default profile");
            currentProfile = new ThrottleProfile(track.getNumPoints());
        }
    }
}
/**
 * @return the default throttle value (the floor used by clipThrottle and the setting
 * applied when no car is detected)
 */
public float getDefaultThrottle() {
    return defaultThrottleValue;
}
/**
 * Sets the default throttle value, persists it to preferences, and keeps the cached
 * defaultThrottle ThrottleBrake in sync.
 *
 * @param defaultThrottle the defaultThrottle to set
 */
public void setDefaultThrottle(float defaultThrottle) {
    this.defaultThrottleValue = defaultThrottle;
    // Fix: the cached ThrottleBrake (returned e.g. by ThrottleProfile.getThrottle(-1))
    // was initialized once from the pref and never updated, so it went stale after
    // this setter ran.
    this.defaultThrottle.throttle = defaultThrottle;
    putFloat("defaultThrottle", defaultThrottle);
}
GLU glu = new GLU(); // used for quadric disk drawing in drawThrottlePainter
GLUquadric quad = null; // lazily allocated quadric

/** Draws the controller overlay: multi-line status text, the throttle profile,
 * the current track point, the last crash location, and the mouse painting cursor.
 */
@Override
public void annotate(GLAutoDrawable drawable) {
    String s = String.format("EvolutionaryThrottleController\nState: %s\ncurrentTrackPos: %d\nThrottle: %8.3f\n%s", state.toString(), currentTrackPos, throttle.throttle, lapTimer.toString());
    MultilineAnnotationTextRenderer.renderMultilineString(s);
    if (showThrottleProfile) {
        drawThrottleProfile(drawable.getGL());
    }
    drawCurrentTrackPoint(drawable.getGL());
    drawLastCrashLocation(drawable.getGL());
    // cache canvas references here since annotate is called with a live GL context
    canvas = chip.getCanvas();
    glCanvas = (GLCanvas) canvas.getCanvas();
    drawThrottlePainter(drawable);
    renderBigStatusText(drawable);
}
/** Draws the throttle profile along the track: the track spline as a thin line, each
 * segment's throttle as a point whose size is proportional to the throttle (colored
 * green if recently sped up, red if slowed down), and braking segments as orange X marks.
 */
private void drawThrottleProfile(GL gl) {
    if (getTrack() != null && getTrack().getPointList() != null && currentProfile != null) {
        // Plot track spline as connected line strip
        gl.glColor4f(.5f, 0, 0, .5f);
        gl.glLineWidth(.5f);
        gl.glBegin(gl.GL_LINE_STRIP);
        for (Point2D p : getTrack().getPointList()) {
            gl.glVertex2d(p.getX(), p.getY());
        }
        gl.glEnd();
        // plot throttle values and braking locations
        gl.glColor4f(.5f, 0, 0, .5f);
        float maxSize = 40f; // point size for full throttle
        int idx = 0;
        for (Point2D p : getTrack().getPointList()) {
            float size = maxSize * currentProfile.getThrottle(idx).throttle;
            if (size < 1) {
                size = 1; // keep even zero-throttle points visible
            }
            if (currentProfile.getBrake(idx)) {
                // braking segment: draw an orange X at the segment position
                gl.glColor4f(.5f,.25f,0,.5f);
                gl.glPushMatrix();
                gl.glTranslatef((float)p.getX(), (float)p.getY(), 0);
                final int scale=2;
                gl.glLineWidth(3);
                gl.glBegin(GL.GL_LINES);
                gl.glVertex2f(-scale,-scale);
                gl.glVertex2f(scale,scale);
                gl.glVertex2f(scale,-scale);
                gl.glVertex2f(-scale,scale);
                gl.glEnd();
                gl.glPopMatrix();
            } else {
                // throttle value point, colored by recent learning activity
                gl.glPointSize(size);
                float rgb[] = {0, 0, .5f};
                if (currentProfile.spedUpSegments[idx]) {
                    rgb[1] = 1; // green: was sped up
                }
                if (currentProfile.slowedDownSegments[idx]) {
                    rgb[0] = 1; // red: was slowed down
                }
                gl.glColor3fv(rgb, 0);
                gl.glBegin(gl.GL_POINTS);
                gl.glVertex2d(p.getX(), p.getY());
                gl.glEnd();
            }
            idx++;
        }
    }
    chip.getCanvas().checkGLError(gl, glu, "in TrackdefineFilter.drawThrottleProfile");
}
private TextRenderer textRenderer = null;
/** Marks the car's current track spline point with a small translucent red rectangle;
 * no-op when position is unknown or there is no track model.
 */
private void drawCurrentTrackPoint(GL gl) {
    if (currentTrackPos == -1 || getTrack() == null) {
        return;
    }
    gl.glColor4f(1, 0, 0, .5f);
    Point2D p = getTrack().getPoint(currentTrackPos);
    gl.glRectd(p.getX() - 1, p.getY() - 1, p.getX() + 1, p.getY() + 1);
}
/** Annotates the last crash location with a yellow "last crash" label and a red point;
 * no-op if no crash has been recorded since the last reset.
 */
private void drawLastCrashLocation(GL gl) {
    if (lastCrashLocation == -1) {
        return;
    }
    if (textRenderer == null) {
        textRenderer = new TextRenderer(new Font("SansSerif", Font.PLAIN, 24), true, true); // lazy init
    }
    textRenderer.setColor(Color.yellow);
    textRenderer.begin3DRendering();
    Point2D p = getTrack().getPoint(lastCrashLocation);
    textRenderer.draw3D("last crash", (float) p.getX(), (float) p.getY(), 0, .2f);
    textRenderer.end3DRendering();
    gl.glPointSize(10);
    gl.glColor3f(1, 0, 0);
    gl.glBegin(GL.GL_POINTS);
    gl.glVertex2d(p.getX(), p.getY());
    gl.glEnd();
}
/**
 * @return whether the throttle profile overlay is drawn in annotate()
 */
public boolean isShowThrottleProfile() {
    return showThrottleProfile;
}

/**
 * @param showThrottleProfile whether to draw the throttle profile overlay
 * (note: not persisted to preferences here)
 */
public void setShowThrottleProfile(boolean showThrottleProfile) {
    this.showThrottleProfile = showThrottleProfile;
}
/**
 * @return whether evolutionary learning (reward/punish on laps/crashes) is enabled
 */
public boolean isLearningEnabled() {
    return learningEnabled;
}

/**
 * @param learning whether to enable evolutionary learning; persisted to preferences
 */
public void setLearningEnabled(boolean learning) {
    this.learningEnabled = learning;
    putBoolean("learningEnabled", learningEnabled);
}
/**
 * @return the maximum throttle perturbation applied per learning step
 */
public float getThrottleChange() {
    return throttleChange;
}

/**
 * Sets the throttle perturbation step, clamped to [0,1]; persisted to preferences.
 *
 * @param change the throttle change to set
 */
public void setThrottleChange(float change) {
    if (change > 1) {
        change = 1;
    } else if (change < 0) {
        change = 0;
    }
    this.throttleChange = change;
    putFloat("throttleChange", throttleChange);
}
/**
 * @return the fraction of track spline points sped up after successful laps
 */
public float getFractionOfTrackToSpeedUp() {
    return fractionOfTrackToSpeedUp;
}

/**
 * Sets the fraction of track to speed up per reward, clamped to [0,1]; persisted to preferences.
 *
 * @param fractionOfTrackToSpeedUp the fraction to set
 */
synchronized public void setFractionOfTrackToSpeedUp(float fractionOfTrackToSpeedUp) {
    if (fractionOfTrackToSpeedUp < 0) {
        fractionOfTrackToSpeedUp = 0;
    } else if (fractionOfTrackToSpeedUp > 1) {
        fractionOfTrackToSpeedUp = 1;
    }
    this.fractionOfTrackToSpeedUp = fractionOfTrackToSpeedUp;
    putFloat("fractionOfTrackToSpeedUp", fractionOfTrackToSpeedUp);
}
/**
 * @return the number of track segments braked before a crash location
 */
public int getNumSegmentsToBrakeBeforeCrash() {
    return numSegmentsToBrakeBeforeCrash;
}

/**
 * @return the gap (in segments) between the crash location and the braking point
 */
public int getNumSegmentsSpacingFromCrashToBrakingPoint() {
    return numSegmentsSpacingFromCrashToBrakingPoint;
}

/**
 * Sets the crash-to-braking-point spacing (floored at 0); persisted to preferences
 * and announced via a property change event.
 *
 * @param numSegmentsSpacingFromCrashToBrakingPoint the spacing to set
 */
public void setNumSegmentsSpacingFromCrashToBrakingPoint(int numSegmentsSpacingFromCrashToBrakingPoint) {
    if (numSegmentsSpacingFromCrashToBrakingPoint < 0) {
        numSegmentsSpacingFromCrashToBrakingPoint = 0;
    }
    int old = this.numSegmentsSpacingFromCrashToBrakingPoint;
    this.numSegmentsSpacingFromCrashToBrakingPoint = numSegmentsSpacingFromCrashToBrakingPoint;
    putInt("numSegmentsSpacingFromCrashToBrakingPoint", numSegmentsSpacingFromCrashToBrakingPoint);
    getSupport().firePropertyChange("numSegmentsSpacingFromCrashToBrakingPoint", old, numSegmentsSpacingFromCrashToBrakingPoint);
}
/**
 * Sets the number of segments to brake before a crash location (floored at 0);
 * persisted to preferences and announced via a property change event.
 *
 * @param numSegmentsToBrakeBeforeCrash the numSegmentsToBrakeBeforeCrash to set
 */
public void setNumSegmentsToBrakeBeforeCrash(int numSegmentsToBrakeBeforeCrash) {
    if (numSegmentsToBrakeBeforeCrash < 0) {
        numSegmentsToBrakeBeforeCrash = 0;
    }
    final int old = this.numSegmentsToBrakeBeforeCrash;
    this.numSegmentsToBrakeBeforeCrash = numSegmentsToBrakeBeforeCrash;
    putInt("numSegmentsToBrakeBeforeCrash", numSegmentsToBrakeBeforeCrash);
    getSupport().firePropertyChange("numSegmentsToBrakeBeforeCrash", old, numSegmentsToBrakeBeforeCrash);
}
/**
 * @return the number of successful (crash-free) laps required between rewards
 */
public int getNumSuccessfulLapsToReward() {
    return numSuccessfulLapsToReward;
}

/**
 * Sets the laps-between-rewards count (floored at 1); persisted to preferences.
 *
 * @param numSuccessfulLapsToReward the numSuccessfulLapsToReward to set
 */
public void setNumSuccessfulLapsToReward(int numSuccessfulLapsToReward) {
    if (numSuccessfulLapsToReward < 1) {
        numSuccessfulLapsToReward = 1;
    }
    this.numSuccessfulLapsToReward = numSuccessfulLapsToReward;
    putInt("numSuccessfulLapsToReward", numSuccessfulLapsToReward);
}
/**
 * @return the track model held by the enclosed track-define filter
 */
public SlotcarTrack getTrack() {
    return trackDefineFilter.getTrack();
}

/**
 * @return the fraction of track slowed down before a crash location
 */
public float getFractionOfTrackToSlowDownPreCrash() {
    return fractionOfTrackToSlowDownPreCrash;
}
/**
 * Sets the fraction of track to slow down before a crash location, clamped to [0,1];
 * persisted to preferences.
 *
 * @param fractionOfTrackToSlowDownPreCrash the fractionOfTrackToSlowDownPreCrash to set
 */
public void setFractionOfTrackToSlowDownPreCrash(float fractionOfTrackToSlowDownPreCrash) {
    if (fractionOfTrackToSlowDownPreCrash < 0) {
        fractionOfTrackToSlowDownPreCrash = 0;
    } else if (fractionOfTrackToSlowDownPreCrash > 1) {
        fractionOfTrackToSlowDownPreCrash = 1;
    }
    this.fractionOfTrackToSlowDownPreCrash = fractionOfTrackToSlowDownPreCrash;
    // Fix: unlike every sibling setter, this value was never persisted even though it is
    // loaded from this preference key at construction.
    putFloat("fractionOfTrackToSlowDownPreCrash", fractionOfTrackToSlowDownPreCrash);
}
/**
 * @return the throttle used while starting (before a car cluster is detected)
 */
public float getStartingThrottleValue() {
    return startingThrottleValue;
}

/**
 * @return the throttle step applied per mouse-edit event
 */
public float getEditThrottleChange() {
    return editThrottleChange;
}

/**
 * Sets the mouse-edit throttle step, clamped to [.001, 1]; persisted to preferences.
 *
 * @param editThrottleChange the editThrottleChange to set
 */
public void setEditThrottleChange(float editThrottleChange) {
    if (editThrottleChange < .001f) {
        editThrottleChange = .001f;
    } else if (editThrottleChange > 1) {
        editThrottleChange = 1;
    }
    this.editThrottleChange = editThrottleChange;
    putFloat("editThrottleChange", editThrottleChange);
}
/**
 * Sets the starting throttle, clamped to [0,1]; persisted to preferences.
 *
 * @param startingThrottleValue the startingThrottleValue to set
 */
public void setStartingThrottleValue(float startingThrottleValue) {
    if (startingThrottleValue < 0) {
        startingThrottleValue = 0;
    } else if (startingThrottleValue > 1) {
        startingThrottleValue = 1;
    }
    this.startingThrottleValue = startingThrottleValue;
    putFloat("startingThrottleValue", startingThrottleValue);
}
/** Profile of throttle/brake values around the track, one ThrottleBrake per track spline
 * point. Supports evolutionary perturbation (addBump/subtractBump/addBrake), uniform
 * scaling, interactive mouse editing, and initialization from track curvature.
 */
private class ThrottleProfile implements Cloneable, Serializable {

    ThrottleBrake[] throttleValues; // per-segment throttle and brake settings
    boolean[] spedUpSegments, slowedDownSegments; // display markers for segments changed by learning
    int numPoints = 0; // number of spline points (same as track model)

    /** Creates a new ThrottleProfile using existing array of throttle settings.
     *
     * @param throttleSettings array of throttle points.
     */
    public ThrottleProfile(ThrottleBrake[] throttleSettings) {
        this.throttleValues = throttleSettings;
        this.numPoints = throttleSettings.length;
        spedUpSegments = new boolean[numPoints];
        slowedDownSegments = new boolean[numPoints];
    }

    /** Creates a new ThrottleProfile with numPoints points, all at the default throttle.
     *
     * @param numPoints number of throttle points.
     */
    public ThrottleProfile(int numPoints) {
        super();
        this.numPoints = numPoints;
        throttleValues = new ThrottleBrake[numPoints];
        for (int i = 0; i < numPoints; i++) {
            throttleValues[i] = new ThrottleBrake(defaultThrottleValue, false);
        }
        spedUpSegments = new boolean[numPoints];
        slowedDownSegments = new boolean[numPoints];
    }

    /** Returns a deep copy of this profile. */
    @Override
    public Object clone() throws CloneNotSupportedException {
        ThrottleProfile newProfile = (ThrottleProfile) super.clone();
        newProfile.throttleValues = new ThrottleBrake[numPoints];
        for (int i = 0; i < numPoints; i++) {
            newProfile.throttleValues[i] = new ThrottleBrake(throttleValues[i].throttle, throttleValues[i].brake);
        }
        // Fix: super.clone() copies only the array references, so clones previously
        // SHARED their learning-marker arrays with the original (mutating one mutated
        // the other). Deep-copy them as well.
        newProfile.spedUpSegments = spedUpSegments.clone();
        newProfile.slowedDownSegments = slowedDownSegments.clone();
        return newProfile;
    }

    /** Returns the throttle/brake for a track segment; the default throttle for section -1
     * (unknown position).
     */
    public ThrottleBrake getThrottle(int section) {
        if (section == -1) {
            return defaultThrottle;
        }
        return throttleValues[section];
    }

    /** Returns whether a segment is a braking segment; false for section -1. */
    public boolean getBrake(int section) {
        if (section == -1) {
            return false;
        }
        return throttleValues[section].brake;
    }

    /** Number of points in the throttleValues (same as number of spline points in the track). */
    public int getNumPoints() {
        return numPoints;
    }

    /** Returns the backing array of per-segment settings (not a copy). */
    public ThrottleBrake[] getProfile() {
        return throttleValues;
    }

    /** Adds a throttle bump (tent-shaped increase) at a randomly chosen location,
     * preferring segments with currently low throttle; clears any brake flags inside
     * the bump and marks the affected segments as sped up.
     */
    public void addBump() {
        Arrays.fill(spedUpSegments, false);
        // increase throttle settings randomly around some track point
        int center = getNextThrottleBumpPoint();
        int m = (int) (numPoints * getFractionOfTrackToSpeedUp());
        log.info("speeding up " + m + " of " + numPoints + " throttle settings around track point " + center);
        for (int i = 0; i < m; i++) {
            float dist = (float) Math.abs(i - m / 2);
            float factor = (m / 2 - dist) / (m / 2); // 0 at the edges, 1 at the center of the bump
            int ind = getIndexFrom(center, i);
            throttleValues[ind].throttle = clipThrottle(throttleValues[ind].throttle + (float) throttleChange * factor); // tent-shaped increase
            throttleValues[ind].brake = false;
            spedUpSegments[ind] = true;
        }
    }

    /** Subtracts a rectangle of throttle starting at segment and continuing back for fractionOfTrackToPunish.
     * The amount subtracted is a fraction of the throttleChange.
     * @param segment the starting point of the subtraction, e.g. the location just before the last crash.
     */
    public void subtractBump(int segment) {
        Arrays.fill(slowedDownSegments, false);
        int n = (int) (numPoints * fractionOfTrackToSlowDownPreCrash);
        log.info("reducing throttle starting from segment " + segment);
        try {
            for (int i = 0; i < n; i++) {
                int seg = (segment - i);
                if (seg < 0) { // wrap around the start of the track
                    seg = numPoints + seg;
                }
                throttleValues[seg].throttle = clipThrottle(throttleValues[seg].throttle - throttleChange / 2);
                slowedDownSegments[seg] = true;
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            log.warning(e.toString());
        }
    }

    /** Marks numSegmentsToBrakeBeforeCrash segments as braking segments, ending
     * numSegmentsSpacingFromCrashToBrakingPoint segments before the given crash segment
     * (with wrap-around).
     */
    private void addBrake(int segment) {
        int n = numSegmentsToBrakeBeforeCrash;
        int s = segment - numSegmentsSpacingFromCrashToBrakingPoint;
        if (s < 0) {
            s = numPoints + s; // wrap
        }
        segment = s;
        log.info("braking for " + numSegmentsToBrakeBeforeCrash + " starting from segment " + segment);
        try {
            for (int i = 0; i < n; i++) {
                int seg = (segment - i);
                if (seg < 0) { // wrap around the start of the track
                    seg = numPoints + seg;
                }
                throttleValues[seg].brake = true;
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            log.warning(e.toString());
        }
    }

    /** Clears the sped-up/slowed-down display markers. */
    public void resetMarkedSegments() {
        Arrays.fill(slowedDownSegments, false);
        Arrays.fill(spedUpSegments, false);
    }

    /** Reduces speed on current throttleValues uniformly by throttleChange/3 */
    public void slowDown() {
        for (int i = 0; i < numPoints; i++) {
            throttleValues[i].throttle = clipThrottle(throttleValues[i].throttle - throttleChange / 3);
        }
    }

    /** Increases speed on current throttleValues uniformly by throttleChange/3 */
    public void speedUp() {
        for (int i = 0; i < numPoints; i++) {
            throttleValues[i].throttle = clipThrottle(throttleValues[i].throttle + throttleChange / 3);
        }
    }

    /** Returns the segment at distance from center, wrapping around the track.
     *
     * @param center the center segment index of the computation.
     * @param distance the distance; positive to advance, negative to retard.
     * @return the segment index.
     */
    private int getIndexFrom(int center, int distance) {
        int index = center + distance;
        if (index > numPoints - 1) {
            index = index - numPoints;
        } else if (index < 0) {
            index = index + numPoints;
        }
        return index;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("ThrottleProfile: ");
        for (int i = 0; i < numPoints; i++) {
            sb.append(String.format(" %d:%.2f", i, throttleValues[i].throttle));
        }
        return sb.toString();
    }

    /** Resets all throttle values to defaultThrottleValue, unsets all brake segments. */
    private void reset() {
        log.info("reset all throttle settings to defaultThrottle=" + defaultThrottleValue);
        for (ThrottleBrake t : throttleValues) {
            t.set(defaultThrottleValue, false);
        }
        resetMarkedSegments();
    }

    // Chooses the next spot to add a throttle bump, based on previous throttle values.
    // The higher the previous throttle, the less likely to choose it.
    private int getNextThrottleBumpPoint() {
        // Accept/reject sampling: treat (1 - throttleValues[i]) as the likelihood of
        // choosing segment i. Uniformly pick a bin B and a value V in [0,1); accept B as
        // the bump center if throttleValues[B] < V. Higher throttle -> lower acceptance.
        int tries = numPoints * 3;
        while (tries-- > 0) {
            float v = random.nextFloat();
            int b = random.nextInt(numPoints);
            if (throttleValues[b].throttle < v) {
                return b;
            }
        }
        return random.nextInt(numPoints); // give up and just choose one uniformly
    }

    /** Mouse edit: toggles the brake flag of a segment; ignores out-of-range indices. */
    private void editToggleBrake(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].brake = !throttleValues[idx].brake;
    }

    /** Mouse edit: marks a segment as a braking segment; ignores out-of-range indices. */
    private void editSetBrake(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].brake = true;
    }

    /** Mouse edit: clears a segment's braking flag; ignores out-of-range indices. */
    private void editClearBrake(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].brake = false;
    }

    /** Mouse edit: raises a segment's throttle by editThrottleChange, capped at 1. */
    private void editIncreaseThrottle(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].throttle = min(throttleValues[idx].throttle + editThrottleChange, 1);
    }

    /** Mouse edit: lowers a segment's throttle by editThrottleChange, floored at 0. */
    private void editDecreaseThrottle(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].throttle = max(throttleValues[idx].throttle - editThrottleChange, 0);
    }

    /** Initializes throttle from the track model's smoothed absolute curvature:
     * smoother (straighter) sections get higher throttle, capped at 1.
     */
    private void guessThrottleFromTrackModel() {
        if (getTrack() == null) {
            log.warning("null track");
            return;
        }
        getTrack().updateCurvature();
        float[] curvatures = getTrack().getCurvatureAtPoints();
        for (int i = 0; i < curvatures.length; i++) {
            curvatures[i] = (float) Math.abs(curvatures[i]);
        }
        final int nfilt = numPoints / 30; // box filter length for smoothing curvature
        float[] smoothed = new float[curvatures.length];
        for (int i = nfilt - 1; i < curvatures.length; i++) {
            float s = 0;
            for (int j = 0; j < nfilt; j++) {
                s += curvatures[i - j];
            }
            s /= nfilt;
            smoothed[i] = s;
        }
        for (int i = 0; i < nfilt - 1; i++) {
            smoothed[i] = curvatures[i]; // TODO no filter here yet
        }
        float minCurv = Float.MAX_VALUE;
        for (float c : smoothed) {
            if (c < minCurv) {
                minCurv = c;
            }
        }
        float maxCurv = Float.MIN_VALUE;
        for (float c : smoothed) {
            if (c > maxCurv) {
                maxCurv = c;
            }
        }
        for (int idx = 0; idx < numPoints; idx++) {
            int shiftedIdx = idx - nfilt; // compensate the filter's phase lag
            if (shiftedIdx < 0) {
                shiftedIdx = numPoints + shiftedIdx;
            }
            throttleValues[shiftedIdx].throttle = min(1, startingThrottleValue * 2 * (float) Math.pow((smoothed[idx] / maxCurv), .15));
        }
    }
} // ThrottleProfile
/** Returns the smaller of two floats (same semantics as the original ternary, including NaN behavior). */
private float min(float a, float b) {
    if (a < b) {
        return a;
    }
    return b;
}
/** Returns the larger of two floats (same semantics as the original ternary, including NaN behavior). */
private float max(float a, float b) {
    if (a > b) {
        return a;
    }
    return b;
}
// mouse control of throttle throttleValues
/** Attaches/detaches the mouse listeners used for interactive throttle profile editing
 * when this filter is selected/deselected in the GUI.
 */
@Override
public void setSelected(boolean yes) {
    super.setSelected(yes);
    if (glCanvas == null) {
        return; // no rendering canvas available yet
    }
    if (yes) {
        glCanvas.addMouseListener(this);
        glCanvas.addMouseMotionListener(this);
    } else {
        glCanvas.removeMouseListener(this);
        glCanvas.removeMouseMotionListener(this);
    }
}
// Unused MouseListener methods: only drag and move events are handled
// (see mouseDragged/mouseMoved below).
@Override
public void mouseClicked(MouseEvent e) {
}

@Override
public void mousePressed(MouseEvent e) {
}

@Override
public void mouseReleased(MouseEvent e) {
}

@Override
public void mouseEntered(MouseEvent e) {
}

@Override
public void mouseExited(MouseEvent e) {
}
// helpers

/** Returns the chip pixel under the mouse event, or null if the canvas is missing
 * or the mouse is outside chip bounds.
 */
private Point getPixel(MouseEvent e) {
    if (canvas == null) {
        return null;
    }
    Point p = canvas.getPixelFromMouseEvent(e);
    if (canvas.wasMousePixelInsideChipBounds()) {
        return p;
    } else {
        return null;
    }
}
/** Returns true if ONLY the shift modifier is down (no ctrl, no alt). */
private boolean isShift(MouseEvent e) {
    return e.isShiftDown() && !e.isControlDown() && !e.isAltDown();
}
/** Returns true if ONLY the control modifier is down (no shift, no alt). */
private boolean isControl(MouseEvent e) {
    return e.isControlDown() && !e.isShiftDown() && !e.isAltDown();
}
/** Returns the track spline index closest to the mouse event position, or -1 if there
 * is no track model or the mouse is outside chip bounds.
 */
private int getIndex(MouseEvent e) {
    if (getTrack() == null) {
        log.warning("null track model");
        return -1;
    }
    Point p = getPixel(e);
    if (p == null) {
        return -1;
    }
    return getTrack().findClosestIndex(p, 0, true);
}
private int lastEditIdx = -1; // last profile index edited by dragging, to avoid repeating an edit on the same segment

// Edit mode for the mouse painter cursor.
// NOTE(review): "Increae" is a misspelling of "Increase"; kept as-is because the
// constant is referenced elsewhere in this class.
enum EditState {
    Increae, Decrease, None
};
volatile EditState editState = EditState.None; // current mouse-edit mode; drives the painter cursor color
/** Edits the throttle profile segment under the mouse while dragging:
 * alt+shift clears a brake point, alt alone sets one, shift alone increases throttle,
 * ctrl alone decreases it. Each segment is edited at most once per pass of the drag.
 */
@Override
public void mouseDragged(MouseEvent e) {
    if (currentProfile == null) {
        return;
    }
    int idx = -1;
    if ((idx = getIndex(e)) == -1) {
        return; // no track or mouse outside chip
    }
    if (idx != lastEditIdx) {
        if (e.isAltDown() && e.isShiftDown()) {
            // clear brake point
            currentProfile.editClearBrake(idx);
            editState = EditState.None;
            glCanvas.repaint();
        } else if (e.isAltDown() && !e.isShiftDown()) {
            // set brake point
            currentProfile.editSetBrake(idx);
            editState = EditState.None;
            glCanvas.repaint();
        } else if (isShift(e)) {
            currentProfile.editIncreaseThrottle(idx);
            editState = EditState.Increae;
            glCanvas.repaint();
        } else if (isControl(e)) {
            currentProfile.editDecreaseThrottle(idx);
            editState = EditState.Decrease;
            glCanvas.repaint();
        } else {
            editState = EditState.None;
        }
    }
    lastEditIdx = idx;
}
/** Updates the edit-mode indicator (painter cursor color) from the modifier keys
 * while the mouse moves without dragging.
 */
@Override
public void mouseMoved(MouseEvent e) {
    if (isShift(e)) {
        editState = EditState.Increae;
    } else if (isControl(e)) {
        editState = EditState.Decrease;
    } else {
        editState = EditState.None;
    }
}
private boolean hasBlendChecked = false; // whether GL blend support has been probed yet
private boolean hasBlend = false; // whether GL_EXT_blend_color blending is available

/** Draws the circular mouse cursor used for painting throttle/brake edits onto the
 * profile, colored by the current edit mode (yellow none, green increase, red decrease).
 * Only drawn when this filter is selected and track/profile exist.
 */
private void drawThrottlePainter(GLAutoDrawable drawable) {
    if (isSelected() && getTrack() != null && getTrack().getPointList() != null && currentProfile != null) {
        Point mp = glCanvas.getMousePosition();
        Point p = canvas.getPixelFromPoint(mp);
        if (p == null) {
            return;
        }
        GL gl = drawable.getGL();
        if (!hasBlendChecked) {
            hasBlendChecked = true; // probe extension string only once
            String glExt = gl.glGetString(GL.GL_EXTENSIONS);
            if (glExt.indexOf("GL_EXT_blend_color") != -1) {
                hasBlend = true;
            }
        }
        if (hasBlend) {
            try {
                gl.glEnable(GL.GL_BLEND);
                gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
                gl.glBlendEquation(GL.GL_FUNC_ADD);
            } catch (GLException e) {
                e.printStackTrace();
                hasBlend = false; // blending advertised but unusable; fall back
            }
        }
        // cursor color encodes the current edit mode
        switch (editState) {
            case None:
                gl.glColor4f(.25f, .25f, 0, .3f);
                break;
            case Increae:
                gl.glColor4f(0, .45f, 0, .5f);
                break;
            case Decrease:
                gl.glColor4f(.45f, .0f, 0, .5f);
        }
        gl.glPushMatrix();
        gl.glTranslatef(p.x, p.y, 0);
        if (quad == null) {
            quad = glu.gluNewQuadric(); // lazy init; needs GL context
        }
        glu.gluQuadricDrawStyle(quad, GLU.GLU_FILL);
        glu.gluDisk(quad, 0, 5, 32, 1);
        gl.glPopMatrix();
        chip.getCanvas().checkGLError(gl, glu, "in drawThrottlePainterk");
    }
}
}
| src/ch/unizh/ini/jaer/projects/virtualslotcar/EvolutionaryThrottleController.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ch.unizh.ini.jaer.projects.virtualslotcar;
import com.sun.opengl.util.GLUT;
import com.sun.opengl.util.j2d.TextRenderer;
import java.awt.Color;
import java.awt.Font;
import java.awt.Point;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import javax.media.opengl.GLCanvas;
import javax.media.opengl.GLException;
import net.sf.jaer.graphics.ChipCanvas;
import net.sf.jaer.graphics.MultilineAnnotationTextRenderer;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Random;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.glu.GLU;
import javax.media.opengl.glu.GLUquadric;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.eventprocessing.FilterChain;
import net.sf.jaer.graphics.FrameAnnotater;
import net.sf.jaer.util.StateMachineStates;
/**
* Learns the throttle at different part of the track.
* <p>
* After a discussion with Garrick and Tobias Glassmachers we decided to go
* for coding up a learning approach that saves successful
* ThrottleSetting profiles if the car makes it around the track twice, and then randomly perturbs the
* throttleValues to increase the throttle smoothly somewhere along the track. If the change causes a
* crash, we go back to the saved throttleValues and perturb again, using a random bump
* of throttle increase somewhere on the track. This approach will guarantee increase in
* speed and will always eventually cause a crash but we can add a button to go back to the last
* successful throttleValues. The track model is the basis of this because it tells us where we are.
*
* @author Juston, Tobi
*/
public class EvolutionaryThrottleController extends AbstractSlotCarController implements SlotCarControllerInterface, FrameAnnotater, MouseListener, MouseMotionListener, PropertyChangeListener {
/** Returns the one-line description shown in the jAER filter chooser. */
public static String getDescription() {
    return "Evolution-based slot car throttle controller";
}
// prefs
private int numSegmentsToBrakeBeforeCrash=getInt("numSegmentsToBrakeBeforeCrash",2);
private float fractionOfTrackToSpeedUp = getFloat("fractionOfTrackToSpeedUp", 0.3f);
private float fractionOfTrackToSlowDownPreCrash = getFloat("fractionOfTrackToSlowDownPreCrash", .15f);
private float defaultThrottleValue = getFloat("defaultThrottle", .1f); // default throttle setting if no car is detected
private ThrottleBrake defaultThrottle=new ThrottleBrake(defaultThrottleValue, false);
private boolean learningEnabled = getBoolean("learningEnabled", false);
private float throttleChange = getFloat("throttleChange", 0.03f);
private float editThrottleChange = getFloat("editThrottleChange", 0.2f);
private int numSuccessfulLapsToReward = getInt("numSuccessfulLapsToReward", 2);
private float startingThrottleValue = getFloat("startingThrottleValue", .1f);
private boolean showThrottleProfile = getBoolean("showThrottleProfile", true);
private GLUT glut=new GLUT();
/** Possible states of the controller state machine:
 * <ol>
 * <li> OVERRIDDEN means the controller output is being overridden and the default throttle is returned,
 * <li> STARTING means no car is tracked or tracker has not found a car cluster near the track model,
 * <li> RUNNING is the active state,
 * <li> CRASHED is the state if we were RUNNING and the car tracker has tracked the car
 * sufficiently far away from the track model.
 * </ol>
 * NOTE(review): an earlier STALLED state (car stops being tracked while still on the
 * track) was described here but is not implemented in this enum.
 */
public enum State {
    OVERRIDDEN, STARTING, RUNNING, CRASHED
}
/** State machine holder for the racer; the initial state is STARTING. */
protected class RacerState extends StateMachineStates {
    State state = State.STARTING;
    @Override
    public Enum getInitial() {
        return State.STARTING;
    }
}
private RacerState state = new RacerState();
// vars
private ThrottleBrake throttle = new ThrottleBrake(); // last output throttle setting
private int currentTrackPos; // position in spline parameter of track
private int lastRewardLap = 0;
private ThrottleProfile currentProfile, lastSuccessfulProfile, lastSuccessfulProfileEvenOlder;
private Random random = new Random();
LapTimer lapTimer = null;
private int lapTime;
private int prevLapTime;
private TrackdefineFilter trackDefineFilter;
private FilterChain filterChain;
private TwoCarTracker carTracker;
private TwoCarTracker.TwoCarCluster car = null;
private boolean showedMissingTrackWarning = false;
private SlotcarSoundEffects sounds = null;
private int lastCrashLocation = -1;
private GLCanvas glCanvas;
private ChipCanvas canvas;
/** Constructs the controller: registers GUI property tooltips, loads any saved throttle
 * profile from preferences, builds the enclosed filter chain (track definer + two-car
 * tracker), wires track-change listeners, and sets up the lap timer and sound effects.
 *
 * @param chip the AEChip this filter processes
 */
public EvolutionaryThrottleController(AEChip chip) {
    super(chip);
    final String s = "EvolutionaryThrottleController";
    setPropertyTooltip(s, "defaultThrottle", "default throttle setting if no car is detected; also starting throttle after resetting learning and minimum allowed throttle");
    // NOTE(review): "fractionOfTrackToPunish" does not match any property declared in
    // this class (cf. fractionOfTrackToSpeedUp / fractionOfTrackToSlowDownPreCrash);
    // confirm the intended key - this tooltip is likely never shown.
    setPropertyTooltip(s, "fractionOfTrackToPunish", "fraction of track to reduce throttle and mark for no reward");
    setPropertyTooltip(s, "learningEnabled", "enable evolution - successful profiles are sped up, crashes cause reversion to last successful profile");
    setPropertyTooltip(s, "throttleChange", "max amount to increase throttle for perturbation");
    setPropertyTooltip(s, "editThrottleChange", "amount to change throttle for mouse edits of the throttle profile");
    setPropertyTooltip(s, "numSuccessfulLapsToReward", "number of successful (no crash) laps between rewards");
    setPropertyTooltip(s, "numSegmentsToBrakeBeforeCrash", "number track segments to brake for just prior to crash location");
    setPropertyTooltip(s, "fractionOfTrackToSpeedUp", "fraction of track spline points to increase throttle on after successful laps");
    setPropertyTooltip(s, "fractionOfTrackToSlowDownPreCrash", "fraction of track spline points before crash point to reduce throttle on");
    setPropertyTooltip(s, "startingThrottleValue", "throttle value when starting (no car cluster detected)");
    setPropertyTooltip(s, "showThrottleProfile", "displays the throttle profile, with dot size reprenting the throttle value");
    // do methods
    setPropertyTooltip(s, "guessThrottleFromTrackModel", "guess initial throttle profile from track model");
    setPropertyTooltip(s, "resetAllThrottleValues", "reset all profile points to defaultThrottle");
    setPropertyTooltip(s, "loadThrottleSettings", "load profile from preferences");
    setPropertyTooltip(s, "saveThrottleSettings", "save profile to preferences");
    setPropertyTooltip(s, "revertToLastSuccessfulProfile", "explicitly revert profile to last one that made it around the track at least numSuccessfulLapsToReward");
    setPropertyTooltip(s, "slowDown", "reduce all profile point throttle settings");
    setPropertyTooltip(s, "speedUp", "increase all profile point throttle settings");
    doLoadThrottleSettings(); // restore any previously saved profile
    // build the enclosed processing chain: track definition feeds the car tracker
    filterChain = new FilterChain(chip);
    trackDefineFilter = new TrackdefineFilter(chip);
    trackDefineFilter.setEnclosed(true, this);
    carTracker = new TwoCarTracker(chip);
    carTracker.setTrack(trackDefineFilter.getTrack());
    carTracker.setEnclosed(true, this);
    carTracker.addObserver(trackDefineFilter); // so that track define filter can getString the tracker output
    filterChain.add(trackDefineFilter);
    filterChain.add(carTracker);
    // propagate track changes to this controller, the tracker, and the lap timer
    trackDefineFilter.getSupport().addPropertyChangeListener(SlotcarTrack.EVENT_TRACK_CHANGED, this);
    trackDefineFilter.getSupport().addPropertyChangeListener(SlotcarTrack.EVENT_TRACK_CHANGED, carTracker);
    lapTimer = new LapTimer(getTrack());
    trackDefineFilter.getSupport().addPropertyChangeListener(SlotcarTrack.EVENT_TRACK_CHANGED, lapTimer);
    setEnclosedFilterChain(filterChain);
    try {
        sounds = new SlotcarSoundEffects(0);
    } catch (Exception ex) {
        log.warning("No sound effects available: " + ex.toString());
    }
    if (chip.getCanvas() != null && chip.getCanvas().getCanvas() != null) {
        glCanvas = (GLCanvas) chip.getCanvas().getCanvas();
    }
}
/**
 * Main per-packet control loop: runs the enclosed tracker/track filters, then
 * advances the OVERRIDDEN/STARTING/RUNNING/CRASHED state machine. While RUNNING,
 * laps are timed; clean laps trigger a reward (profile clone + random speed-up)
 * and crashes trigger reversion to the last successful profile plus a brake or
 * slow-down around the crash segment.
 */
@Override
public EventPacket<?> filterPacket(EventPacket<?> in) {
    // (Re)build the throttle profile whenever the track model appears or changes size.
    if (trackDefineFilter.getTrack() != null && (currentProfile == null || currentProfile.getNumPoints() != getTrack().getNumPoints())) {
        currentProfile = new ThrottleProfile(getTrack().getNumPoints());
        log.info("made a new ThrottleProfile :" + currentProfile);
    }
    out = getEnclosedFilterChain().filterPacket(in); // does cartracker and maybe trackdefinefilter
    car = carTracker.findCarCluster();
    if (car != null) {
        currentTrackPos = car.segmentIdx;
    }
    // choose state & set throttle
    float prevThrottle = throttle.throttle; // NOTE(review): unused local; kept as-is
    if (state.get() == State.OVERRIDDEN) {
        //            throttle.throttle = getStartingThrottleValue();
    } else if (state.get() == State.STARTING) {
        //            throttle.throttle = getStartingThrottleValue();
        if (car != null && car.isRunning()) {
            state.set(State.RUNNING);
        }
    } else if (state.get() == State.RUNNING) {
        if (trackDefineFilter.getTrack() == null) {
            if (!showedMissingTrackWarning) {
                log.warning("Track not defined yet. Use the TrackdefineFilter to extract the slot car track or load the track from a file.");
            }
            showedMissingTrackWarning = true; // warn only once
        } else {
            if (car != null && !car.crashed) {
                // did we lap?
                boolean lapped = lapTimer.update(currentTrackPos, car.getLastEventTimestamp());
                if (lapped) {
                    lapTime = lapTimer.getLastLap().laptimeUs;
                    int dt = lapTime - prevLapTime;
                    if (dt < 0) {
                        log.info("lap time improved by " + dt / 1000 + " ms");
                    } else if (dt > 0) {
                        log.info("lap time worsened by " + dt / 1000 + " ms");
                    }
                    prevLapTime = lapTime;
                }
                // reward: after enough clean laps, snapshot profiles and speed up a random section
                if (learningEnabled && lapTimer.lapCounter - lastRewardLap > numSuccessfulLapsToReward) {
                    try {
                        log.info("successfully drove " + lapTimer.lapCounter + " laps; cloning this profile and rewarding currentProfile");
                        if (lastSuccessfulProfile != null) {
                            lastSuccessfulProfileEvenOlder = (ThrottleProfile) lastSuccessfulProfile.clone(); // save backup copy of last successfull
                        }
                        if (currentProfile != null) {
                            lastSuccessfulProfile = (ThrottleProfile) currentProfile.clone(); // save current as successful
                        }
                    } catch (CloneNotSupportedException e) {
                        throw new RuntimeException("couldn't clone the current throttle profile: " + e);
                    }
                    currentProfile.addBump();
                    lastRewardLap = lapTimer.lapCounter;
                }
            }
            // punishment: on crash revert to last successful profile and brake/slow before the crash site
            if (carTracker.getCrashedCar() != null) {
                state.set(State.CRASHED);
                lastCrashLocation = car.crashSegment;
                //                        throttle.throttle = getStartingThrottleValue(); // don't actually change profile, starting comes from getThrottle
                sounds.play();
                if (learningEnabled) {
                    if (lastSuccessfulProfile != null && currentProfile != lastSuccessfulProfile) {
                        log.info("crashed at segment" + lastCrashLocation + ", switching back to previous profile");
                        currentProfile = lastSuccessfulProfile;
                    }
                    if (numSegmentsToBrakeBeforeCrash > 0) {
                        currentProfile.addBrake(carTracker.getCrashedCar().crashSegment);
                    } else {
                        currentProfile.subtractBump(carTracker.getCrashedCar().crashSegment);
                    }
                }
                lastRewardLap = lapTimer.lapCounter; // don't reward until we make some laps from here
            } else {
                // NOTE(review): car may be null here if no cluster was found this packet —
                // car.segmentIdx would then NPE; confirm the tracker guarantees a car while RUNNING.
                throttle = currentProfile.getThrottle(car.segmentIdx);
            }
        }
    } else if (state.get() == State.CRASHED) {
        //            throttle.throttle = getStartingThrottleValue();
        state.set(State.STARTING);
    }
    setBigStatusText(state.toString(), Color.RED);
    return out;
}
private TextRenderer statusRenderer = null; // lazily created by renderBigStatusText
private Color bigStatusColor = Color.WHITE; // color of the large status overlay
private String bigStatusText = null; // text of the large status overlay; null hides it
/** Sets the large status overlay text and its color (rendered later by renderBigStatusText). */
synchronized private void setBigStatusText(String s, Color c) {
    bigStatusColor = c;
    bigStatusText = s;
}
/** Draws bigStatusText centered on the drawable using a lazily created 60pt bold serif renderer; no-op when no text is set. */
synchronized private void renderBigStatusText(GLAutoDrawable drawable) {
    if (bigStatusText == null) {
        return;
    }
    if (statusRenderer == null) {
        statusRenderer = new TextRenderer(new Font("Serif", Font.BOLD, 60)); // created once, reused across frames
    }
    statusRenderer.setColor(bigStatusColor);
    Rectangle2D bounds = statusRenderer.getBounds(bigStatusText);
    statusRenderer.beginRendering(drawable.getWidth(), drawable.getHeight());
    // center the text on the canvas
    statusRenderer.draw(bigStatusText, (int) (drawable.getWidth() / 2 - bounds.getWidth() / 2), (int) (drawable.getHeight() / 2 - bounds.getHeight() / 2));
    statusRenderer.endRendering();
}
/** Computes throttle using tracker output and ThrottleProfile.
 * The actual value is selected in filterPacket/getThrottle; this simply returns
 * the currently chosen ThrottleBrake.
 *
 * @param tracker the car tracker (unused here)
 * @param track the track model (unused here)
 * @return the throttle from 0-1.
 */
synchronized public ThrottleBrake computeControl(CarTracker tracker, SlotcarTrack track) {
    return throttle;
}
/** Resets every profile point to defaultThrottleValue; logs a warning and does nothing if no profile exists yet. */
synchronized public void doResetAllThrottleValues() {
    if (currentProfile != null) {
        currentProfile.reset();
    } else {
        log.warning("cannot reset until profile exists");
    }
}
/** Initializes the profile from the track model's curvature; warns and does nothing if no profile exists yet. */
synchronized public void doGuessThrottleFromTrackModel() {
    if (currentProfile != null) {
        currentProfile.guessThrottleFromTrackModel();
    } else {
        log.warning("cannot guess until profile exists");
    }
}
/**
 * Serializes the current throttle profile (point count followed by the
 * throttle/brake array) into the user Preferences under key
 * "EvolutionaryThrottleController.throttleProfile".
 * The ObjectOutputStream is closed (flushed) BEFORE the byte array is read
 * from the ByteArrayOutputStream; the original code read the bytes first,
 * which could store a truncated profile because ObjectOutputStream buffers
 * its block data.
 */
synchronized public void doSaveThrottleSettings() {
    if (currentProfile == null) {
        log.warning("no profile to save");
        return;
    }
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bos);
        oos.writeObject(currentProfile.numPoints); // autoboxed Integer, read back first by doLoadThrottleSettings
        oos.writeObject(currentProfile.throttleValues);
        oos.close(); // flush all buffered serialization data into bos before extracting the bytes
        bos.close();
        prefs().putByteArray("EvolutionaryThrottleController.throttleProfile", bos.toByteArray());
        log.info("throttle settings saved to preferences"); // fixed typo "saveed"
    } catch (Exception e) {
        log.warning("couldn't save profile: " + e);
    }
}
/**
 * Restores the throttle profile previously stored by doSaveThrottleSettings
 * from the user Preferences; silently returns if nothing was saved.
 * The stored point count is read first (it must be consumed before the array)
 * and is now also used to sanity-check the restored array length.
 */
public final synchronized void doLoadThrottleSettings() {
    try {
        byte[] b = prefs().getByteArray("EvolutionaryThrottleController.throttleProfile", null);
        if (b == null) {
            log.info("no throttle settings saved in preferences, can't load them");
            return;
        }
        ByteArrayInputStream bis = new ByteArrayInputStream(b);
        ObjectInputStream ois = new ObjectInputStream(bis);
        Object o = ois.readObject(); // stored point count (Integer)
        if (o == null) {
            throw new NullPointerException("Couldn't read Integer number of throttle points from preferences");
        }
        int n = ((Integer) o).intValue();
        o = ois.readObject();
        if (o == null) {
            throw new NullPointerException("Couldn't read float array of throttle points from preferences");
        }
        ThrottleBrake[] f = (ThrottleBrake[]) o;
        if (f.length != n) {
            // previously n was read but ignored; use it to detect corrupted preferences
            log.warning("stored point count " + n + " does not match stored array length " + f.length);
        }
        currentProfile = new ThrottleProfile(f);
        ois.close();
        bis.close();
        log.info("loaded throttle profile from preferences: " + currentProfile); // fixed typo "preferencdes"
    } catch (Exception e) {
        log.warning("couldn't load throttle profile: " + e);
    }
}
/** Uniformly reduces the throttle over the whole profile, if one exists. */
synchronized public void doSlowDown() {
    if (currentProfile == null) {
        return;
    }
    currentProfile.slowDown();
    log.info("slowed down current profile to " + currentProfile);
}
/** Uniformly increases the throttle over the whole profile, if one exists. */
synchronized public void doSpeedUp() {
    if (currentProfile == null) {
        return;
    }
    currentProfile.speedUp();
    log.info("speeded up current profile to " + currentProfile);
}
/** Explicitly reverts to the older of the two stored successful profiles, if one exists. */
synchronized public void doRevertToLastSuccessfulProfile() {
    if (lastSuccessfulProfileEvenOlder == null) {
        log.info("cannot revert - no lastSuccessfulProfileEvenOlder stored yet");
        return;
    }
    currentProfile = lastSuccessfulProfileEvenOlder;
    log.info("reverted to " + lastSuccessfulProfileEvenOlder);
}
/** Clamps a throttle value into [defaultThrottleValue, 1]; the upper bound is checked first. */
private float clipThrottle(float t) {
    if (t > 1) {
        return 1;
    }
    if (t < defaultThrottleValue) {
        return defaultThrottleValue;
    }
    return t;
}
final ThrottleBrake startingThrottle=new ThrottleBrake(startingThrottleValue,false);
/**
 * Returns the throttle/brake to apply given the controller state: the learned
 * profile value while RUNNING, startingThrottleValue while STARTING/CRASHED,
 * and defaultThrottleValue while OVERRIDDEN. Note that the same mutable
 * startingThrottle instance is reused and updated for the non-RUNNING states.
 */
public ThrottleBrake getThrottle() {
    Enum s = state.get();
    if (s == State.RUNNING) {
        return throttle;
    } else if (s == State.CRASHED || s == State.STARTING) {
        startingThrottle.throttle = startingThrottleValue;
        return startingThrottle;
    } else if (s == State.OVERRIDDEN) {
        startingThrottle.throttle = defaultThrottleValue;
        return startingThrottle;
    } else {
        throw new Error("state not found for RacerState, shouldn't happen");
    }
}
/**
 * Returns one tab-separated log line: state, track position, throttle value, car.
 * The %f conversion requires a floating-point argument; the original passed the
 * ThrottleBrake object itself, which throws IllegalFormatConversionException at
 * runtime — pass the float field instead.
 */
@Override
public String logControllerState() {
    return String.format("%s\t%d\t%f\t%s", state, currentTrackPos, throttle.throttle, car);
}
/** Column header names matching the tab-separated fields written by logControllerState. */
@Override
public String logContents() {
    return "state currentTrackPos throttle car ";
}
/** Resets to STARTING state and clears lap timer, crash location, profile markers and reward bookkeeping. */
@Override
public void resetFilter() {
    state.set(State.STARTING);
    lapTimer.reset();
    getEnclosedFilterChain().reset();
    lastCrashLocation = -1;
    if (currentProfile != null) {
        currentProfile.resetMarkedSegments(); // clear display-only sped-up/slowed-down markers
    }
    lastRewardLap = 0;
    state.set(State.STARTING); // NOTE(review): redundant — already set at the top of this method
}
/** No initialization needed; all setup happens in the constructor. */
@Override
public void initFilter() {
}
/** Enables/disables this filter; the enclosed track-definition filter always starts disabled. */
@Override
public synchronized void setFilterEnabled(boolean yes) {
    super.setFilterEnabled(yes);
    trackDefineFilter.setFilterEnabled(false); // don't enable by default
}
/**
 * Reacts to track-model changes: when the new track's point count differs from
 * the current profile (or no profile exists), a fresh default profile is built.
 * Property names are compared with equals(): == only works when both strings
 * are the same interned instance, which PropertyChangeEvent does not guarantee.
 */
@Override
synchronized public void propertyChange(PropertyChangeEvent evt) {
    if (SlotcarTrack.EVENT_TRACK_CHANGED.equals(evt.getPropertyName())) {
        SlotcarTrack track = (SlotcarTrack) evt.getNewValue();
        if (currentProfile == null || track.getNumPoints() != currentProfile.getNumPoints()) {
            log.warning("new track has different number of points than current throttle profile, making a new default profile");
            currentProfile = new ThrottleProfile(track.getNumPoints());
        }
    }
}
/**
 * Returns the default (and minimum allowed) throttle value.
 *
 * @return the defaultThrottle
 */
public float getDefaultThrottle() {
    return defaultThrottleValue;
}
/**
 * Sets the default throttle and persists it under the "defaultThrottle" preference key.
 *
 * @param defaultThrottle the defaultThrottle to set
 */
public void setDefaultThrottle(float defaultThrottle) {
    this.defaultThrottleValue = defaultThrottle;
    putFloat("defaultThrottle", defaultThrottle);
}
GLU glu = new GLU(); // GLU helper for quadric drawing and GL error checks
GLUquadric quad = null; // lazily created disk quadric used by drawThrottlePainter
/** Renders the on-screen overlays: multi-line status HUD, throttle profile dots,
 * current track point, last crash marker, mouse paint brush and the big status text. */
@Override
public void annotate(GLAutoDrawable drawable) {
    String s = String.format("EvolutionaryThrottleController\nState: %s\ncurrentTrackPos: %d\nThrottle: %8.3f\n%s", state.toString(), currentTrackPos, throttle.throttle, lapTimer.toString());
    //        if(state.getString()==State.CRASHED){
    //
    //        }else if(state.getString()==State.RUNNING){
    //
    //        }else{
    //        }
    MultilineAnnotationTextRenderer.renderMultilineString(s);
    if (showThrottleProfile) {
        drawThrottleProfile(drawable.getGL());
    }
    drawCurrentTrackPoint(drawable.getGL());
    drawLastCrashLocation(drawable.getGL());
    canvas = chip.getCanvas(); // cache canvases for mouse-coordinate conversion by the edit tools
    glCanvas = (GLCanvas) canvas.getCanvas();
    drawThrottlePainter(drawable);
    renderBigStatusText(drawable);
}
/** Displays the throttle profile over the extracted track points: the track as a
 * thin line strip, per-point throttle as dots sized by throttle value (green when
 * recently sped up, red when slowed down), and braking segments as orange X marks. */
private void drawThrottleProfile(GL gl) {
    if (getTrack() != null && getTrack().getPointList() != null && currentProfile != null) {
        // Plot lines
        gl.glColor4f(.5f, 0, 0, .5f);
        gl.glLineWidth(.5f);
        gl.glBegin(gl.GL_LINE_STRIP);
        for (Point2D p : getTrack().getPointList()) {
            gl.glVertex2d(p.getX(), p.getY());
        }
        gl.glEnd();
        // plot throttle values and braking locations
        gl.glColor4f(.5f, 0, 0, .5f);
        // Draw extracted points
        float maxSize = 40f; // dot size for full throttle
        int idx = 0;
        for (Point2D p : getTrack().getPointList()) {
            float size = maxSize * currentProfile.getThrottle(idx).throttle;
            if (size < 1) {
                size = 1;
            }
            if (currentProfile.getBrake(idx)) {
                // if braking segment, we draw X there, in orange
                gl.glColor4f(.5f, .25f, 0, .5f);
                gl.glPushMatrix();
                gl.glTranslatef((float) p.getX(), (float) p.getY(), 0);
                final int scale = 2;
                gl.glLineWidth(3);
                gl.glBegin(GL.GL_LINES);
                gl.glVertex2f(-scale, -scale);
                gl.glVertex2f(scale, scale);
                gl.glVertex2f(scale, -scale);
                gl.glVertex2f(-scale, scale);
                gl.glEnd();
                gl.glPopMatrix();
            } else {
                // throttle value and if sped up or slowed down
                gl.glPointSize(size);
                float rgb[] = {0, 0, .5f};
                if (currentProfile.spedUpSegments[idx]) {
                    rgb[1] = 1; // green was sped up
                }
                if (currentProfile.slowedDownSegments[idx]) {
                    rgb[0] = 1; // red was slowed down
                }
                gl.glColor3fv(rgb, 0);
                gl.glBegin(gl.GL_POINTS);
                gl.glVertex2d(p.getX(), p.getY());
                gl.glEnd();
            }
            idx++;
        }
    }
    chip.getCanvas().checkGLError(gl, glu, "in TrackdefineFilter.drawThrottleProfile");
}
private TextRenderer textRenderer = null;
/** Draws a small translucent red square at the car's current track point, if track and position are known. */
private void drawCurrentTrackPoint(GL gl) {
    if (getTrack() == null || currentTrackPos == -1) {
        return;
    }
    gl.glColor4f(1, 0, 0, .5f);
    Point2D p = getTrack().getPoint(currentTrackPos);
    gl.glRectd(p.getX() - 1, p.getY() - 1, p.getX() + 1, p.getY() + 1);
}
/** Renders a yellow "last crash" label plus a red point marker at the last crash
 * segment; no-op when no crash has been recorded yet or the track model is gone
 * (the original dereferenced getTrack() without a null check, which could NPE if
 * the track was cleared after a crash was recorded). */
private void drawLastCrashLocation(GL gl) {
    if (lastCrashLocation == -1 || getTrack() == null) {
        return;
    }
    if (textRenderer == null) {
        textRenderer = new TextRenderer(new Font("SansSerif", Font.PLAIN, 24), true, true); // created once
    }
    textRenderer.setColor(Color.yellow);
    textRenderer.begin3DRendering();
    Point2D p = getTrack().getPoint(lastCrashLocation);
    textRenderer.draw3D("last crash", (float) p.getX(), (float) p.getY(), 0, .2f);
    textRenderer.end3DRendering();
    gl.glPointSize(10);
    gl.glColor3f(1, 0, 0);
    gl.glBegin(GL.GL_POINTS);
    gl.glVertex2d(p.getX(), p.getY());
    gl.glEnd();
}
/**
 * Returns whether the throttle profile overlay is drawn in annotate().
 *
 * @return the showThrottleProfile
 */
public boolean isShowThrottleProfile() {
    return showThrottleProfile;
}
/**
 * Enables/disables drawing of the throttle profile overlay.
 *
 * @param showThrottleProfile the showThrottleProfile to set
 */
public void setShowThrottleProfile(boolean showThrottleProfile) {
    // NOTE(review): unlike the other tunable setters this is not persisted via putBoolean — confirm intentional
    this.showThrottleProfile = showThrottleProfile;
}
/**
 * Returns whether profile evolution (reward/punishment) is active.
 *
 * @return the learning
 */
public boolean isLearningEnabled() {
    return learningEnabled;
}
/**
 * Enables/disables profile evolution and persists the choice.
 *
 * @param learning the learning to set
 */
public void setLearningEnabled(boolean learning) {
    this.learningEnabled = learning;
    putBoolean("learningEnabled", learningEnabled);
}
/**
 * Returns the maximum throttle perturbation applied per reward/punishment.
 *
 * @return the throttlePunishment
 */
public float getThrottleChange() {
    return throttleChange;
}
/**
 * Sets the maximum throttle perturbation, clamped into [0, 1], and persists it.
 *
 * @param change the throttlePunishment to set
 */
public void setThrottleChange(float change) {
    this.throttleChange = Math.min(1, Math.max(0, change));
    putFloat("throttleChange", throttleChange);
}
/**
 * Returns the fraction of track spline points sped up after successful laps.
 *
 * @return the fractionOfTrackToPunish
 */
public float getFractionOfTrackToSpeedUp() {
    return fractionOfTrackToSpeedUp;
}
/**
 * Sets the fraction of track spline points to speed up after successful laps,
 * clamped into [0, 1], and persists it.
 *
 * @param fractionOfTrackToSpeedUp the fractionOfTrackToPunish to set
 */
synchronized public void setFractionOfTrackToSpeedUp(float fractionOfTrackToSpeedUp) {
    this.fractionOfTrackToSpeedUp = Math.min(1, Math.max(0, fractionOfTrackToSpeedUp));
    putFloat("fractionOfTrackToSpeedUp", this.fractionOfTrackToSpeedUp);
}
/**
 * Returns how many segments before a crash location are marked for braking.
 *
 * @return the numSegmentsToBrakeBeforeCrash
 */
public int getNumSegmentsToBrakeBeforeCrash() {
    return numSegmentsToBrakeBeforeCrash;
}
/**
 * Sets how many segments before a crash are marked for braking (floored at 0),
 * persists the value and fires a property change for bound GUI controls.
 *
 * @param numSegmentsToBrakeBeforeCrash the numSegmentsToBrakeBeforeCrash to set
 */
public void setNumSegmentsToBrakeBeforeCrash(int numSegmentsToBrakeBeforeCrash) {
    int clamped = Math.max(0, numSegmentsToBrakeBeforeCrash);
    int oldValue = this.numSegmentsToBrakeBeforeCrash;
    this.numSegmentsToBrakeBeforeCrash = clamped;
    putInt("numSegmentsToBrakeBeforeCrash", clamped);
    getSupport().firePropertyChange("numSegmentsToBrakeBeforeCrash", oldValue, clamped);
}
/**
 * Returns the number of clean laps required between profile rewards.
 *
 * @return the numSuccessfulLapsToReward
 */
public int getNumSuccessfulLapsToReward() {
    return numSuccessfulLapsToReward;
}
/**
 * Sets the number of clean laps required between rewards (floored at 1) and persists it.
 *
 * @param numSuccessfulLapsToReward the numSuccessfulLapsToReward to set
 */
public void setNumSuccessfulLapsToReward(int numSuccessfulLapsToReward) {
    this.numSuccessfulLapsToReward = Math.max(1, numSuccessfulLapsToReward);
    putInt("numSuccessfulLapsToReward", this.numSuccessfulLapsToReward);
}
/**
 * Returns the current track model from the enclosed track-definition filter (may be null).
 *
 * @return the track
 */
public SlotcarTrack getTrack() {
    return trackDefineFilter.getTrack();
}
/**
 * Returns the fraction of track points before a crash on which throttle is reduced.
 *
 * @return the fractionOfTrackToSlowDownPreCrash
 */
public float getFractionOfTrackToSlowDownPreCrash() {
    return fractionOfTrackToSlowDownPreCrash;
}
/**
 * Sets the fraction of track points before a crash on which throttle is reduced,
 * clamped into [0, 1]. Now also persisted via putFloat like every other tunable
 * setter in this class (the original omitted persistence, so the value silently
 * reset between sessions).
 *
 * @param fractionOfTrackToSlowDownPreCrash the fractionOfTrackToSlowDownPreCrash to set
 */
public void setFractionOfTrackToSlowDownPreCrash(float fractionOfTrackToSlowDownPreCrash) {
    if (fractionOfTrackToSlowDownPreCrash < 0) {
        fractionOfTrackToSlowDownPreCrash = 0;
    } else if (fractionOfTrackToSlowDownPreCrash > 1) {
        fractionOfTrackToSlowDownPreCrash = 1;
    }
    this.fractionOfTrackToSlowDownPreCrash = fractionOfTrackToSlowDownPreCrash;
    putFloat("fractionOfTrackToSlowDownPreCrash", fractionOfTrackToSlowDownPreCrash);
}
/**
 * Returns the throttle used while starting (no running car detected yet).
 *
 * @return the startingThrottleValue
 */
public float getStartingThrottleValue() {
    return startingThrottleValue;
}
/**
 * Returns the throttle increment applied per mouse edit of the profile.
 *
 * @return the editThrottleChange
 */
public float getEditThrottleChange() {
    return editThrottleChange;
}
/**
 * Sets the throttle increment for mouse edits, clamped into [.001, 1], and persists it.
 *
 * @param editThrottleChange the editThrottleChange to set
 */
public void setEditThrottleChange(float editThrottleChange) {
    this.editThrottleChange = Math.min(1, Math.max(.001f, editThrottleChange));
    putFloat("editThrottleChange", this.editThrottleChange);
}
/**
 * Sets the starting throttle value, clamped into [0, 1], and persists it.
 *
 * @param startingThrottleValue the startingThrottleValue to set
 */
public void setStartingThrottleValue(float startingThrottleValue) {
    this.startingThrottleValue = Math.min(1, Math.max(0, startingThrottleValue));
    putFloat("startingThrottleValue", this.startingThrottleValue);
}
/** Profile of throttle values around track: one ThrottleBrake entry per track
 * spline point. Profiles evolve by rewarding clean laps (addBump) and punishing
 * crashes (subtractBump / addBrake), and can be hand-edited with the mouse. */
private class ThrottleProfile implements Cloneable, Serializable {

    ThrottleBrake[] throttleValues; // one throttle/brake setting per spline point
    boolean[] spedUpSegments, slowedDownSegments; // display-only markers of segments changed by the last reward/punishment
    int numPoints = 0; // number of spline points, same as the track model's

    /** Creates a new ThrottleProfile using existing array of throttle settings.
     *
     * @param throttleSettings array of throttle points (retained, not copied).
     */
    public ThrottleProfile(ThrottleBrake[] throttleSettings) {
        this.throttleValues = throttleSettings;
        this.numPoints = throttleSettings.length;
        spedUpSegments = new boolean[numPoints];
        slowedDownSegments = new boolean[numPoints];
    }

    /** Creates a new ThrottleProfile with numPoints points.
     *
     * @param numPoints number of throttle points.
     */
    public ThrottleProfile(int numPoints) {
        super();
        this.numPoints = numPoints;
        throttleValues = new ThrottleBrake[numPoints];
        for (int i = 0; i < numPoints; i++) {
            throttleValues[i] = new ThrottleBrake(defaultThrottleValue, false); // start at safe default, no braking
        }
        spedUpSegments = new boolean[numPoints];
        slowedDownSegments = new boolean[numPoints];
    }

    /** Deep-copies the throttleValues array only. NOTE(review): spedUpSegments and
     * slowedDownSegments keep the references copied by super.clone(), so the clone
     * and the original share those (display-only) marker arrays. */
    @Override
    public Object clone() throws CloneNotSupportedException {
        ThrottleProfile newProfile = (ThrottleProfile) super.clone();
        newProfile.throttleValues = new ThrottleBrake[numPoints];
        for (int i = 0; i < numPoints; i++) {
            newProfile.throttleValues[i] = new ThrottleBrake(throttleValues[i].throttle, throttleValues[i].brake);
        }
        return newProfile;
    }

    /** Returns the setting for a track section; -1 (unknown section) yields the global default. */
    public ThrottleBrake getThrottle(int section) {
        if (section == -1) {
            return defaultThrottle;
        }
        return throttleValues[section];
    }

    /** Returns whether a track section is marked for braking; -1 (unknown) never brakes. */
    public boolean getBrake(int section) {
        if (section == -1) {
            return false;
        }
        return throttleValues[section].brake;
    }

    /** Number of points in the throttleValues (same as number of spline points in the track). */
    public int getNumPoints() {
        return numPoints;
    }

    /** Returns the backing array of throttle settings (not a copy). */
    public ThrottleBrake[] getProfile() {
        return throttleValues;
    }

    /** Adds a throttle bump at a random location. */
    public void addBump() {
        Arrays.fill(spedUpSegments, false);
        // increase throttle settings around randomly around some track point
        int center = getNextThrottleBumpPoint();
        int m = (int) (numPoints * getFractionOfTrackToSpeedUp());
        log.info("speeding up " + m + " of " + numPoints + " throttle settings around track point " + center);
        for (int i = 0; i < m; i++) {
            float dist = (float) Math.abs(i - m / 2);
            float factor = (m / 2 - dist) / (m / 2); // triangular weighting, peaking at i=m/2
            int ind = getIndexFrom(center, i);
            throttleValues[ind].throttle = clipThrottle(throttleValues[ind].throttle + (float) throttleChange * factor); // increase throttle by tent around random center point
            throttleValues[ind].brake = false; // a sped-up segment cannot be a brake segment
            spedUpSegments[ind] = true;
        }
    }

    /** Subtracts a rectangle of throttle starting at segment and continuing back for fractionOfTrackToPunish.
     * The amount subtracted is a fraction of the throttleChange.
     * @param segment the starting point of the subtraction, e.g. the location just before the last crash.
     */
    public void subtractBump(int segment) {
        Arrays.fill(slowedDownSegments, false);
        int n = (int) (numPoints * fractionOfTrackToSlowDownPreCrash);
        log.info("reducing throttle starting from segment " + segment);
        try {
            for (int i = 0; i < n; i++) {
                int seg = (segment - i); // walk backwards from the crash segment
                if (seg < 0) { // if segment=1, then reduce 1, 0,
                    seg = numPoints + seg; // wrap around the closed track
                }
                //                    System.out.println("reducing "+seg);
                throttleValues[seg].throttle = clipThrottle(throttleValues[seg].throttle - throttleChange / 2);
                slowedDownSegments[seg] = true;
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            log.warning(e.toString());
        }
    }

    /** Marks numSegmentsToBrakeBeforeCrash segments ending at the given segment for braking. */
    private void addBrake(int segment) {
        int n = numSegmentsToBrakeBeforeCrash;
        log.info("braking for " + numSegmentsToBrakeBeforeCrash + " starting from segment " + segment);
        try {
            for (int i = 0; i < n; i++) {
                int seg = (segment - i); // walk backwards from the crash segment
                if (seg < 0) { // if segment=1, then reduce 1, 0,
                    seg = numPoints + seg; // wrap around the closed track
                }
                //                    System.out.println("reducing "+seg);
                throttleValues[seg].brake = true;
                //                    slowedDownSegments[seg] = true;
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            log.warning(e.toString());
        }
    }

    /** Clears the sped-up/slowed-down display markers. */
    public void resetMarkedSegments() {
        Arrays.fill(slowedDownSegments, false);
        Arrays.fill(spedUpSegments, false);
    }

    /** Reduces speed on current throttleValues uniformly by throttleChange/3 */
    public void slowDown() {
        for (int i = 0; i < numPoints; i++) {
            throttleValues[i].throttle = clipThrottle(throttleValues[i].throttle - throttleChange / 3);
        }
    }

    /** Increases speed on current throttleValues uniformly by throttleChange/3 */
    public void speedUp() {
        for (int i = 0; i < numPoints; i++) {
            throttleValues[i].throttle = clipThrottle(throttleValues[i].throttle + throttleChange / 3);
        }
    }

    /** returns the segment at distance from center, wrapping around the closed track.
     *
     * @param center the center segment index of the computation.
     * @param distance the distance; positive to advance, negative to retard.
     * @return the segment index.
     */
    private int getIndexFrom(int center, int distance) {
        int index = center + distance;
        if (index > numPoints - 1) {
            index = index - numPoints;
        } else if (index < 0) {
            index = index + numPoints;
        }
        return index;
    }

    public String toString() {
        StringBuilder sb = new StringBuilder("ThrottleProfile: ");
        for (int i = 0; i < numPoints; i++) {
            sb.append(String.format(" %d:%.2f", i, throttleValues[i].throttle));
        }
        return sb.toString();
    }

    /** Resets all throttle values to defaultThrottleValue, unsets all brake segments. */
    private void reset() {
        log.info("reset all throttle settings to defaultThrottle=" + defaultThrottleValue);
        for (ThrottleBrake t : throttleValues) {
            t.set(defaultThrottleValue, false);
        }
        resetMarkedSegments();
    }

    // chooses next spot to add throttle, based on previous throttle throttleValues.
    // The higher the previous throttle, the less likely to choose it.
    private int getNextThrottleBumpPoint() {
        // do accept/reject sampling to getString next throttle bump center point, such that
        // the higher the throttle is now, the smaller the chance we increase the throttle there.
        // So, we treat (1-throttleValues[i]) as a likehood of choosing a new throttle.
        // We uniformly pick a bin from B in 1:numPoints and a value V in 0:1 and see if that particular
        // throttleValues[B]<V then we select it as the center. That way, the higher the throttle,
        // the less the chance to selecting that location to be the center of the next bump.
        int tries = numPoints * 3; // bounded number of rejection-sampling attempts
        while (tries-- > 0) {
            float v = random.nextFloat();
            int b = random.nextInt(numPoints);
            if (throttleValues[b].throttle < v) {
                return b;
            }
        }
        return random.nextInt(numPoints); //. give up and just choose one uniformly
    }

    /** Toggles the brake flag at idx; out-of-range indices are ignored. */
    private void editToggleBrake(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].brake = !throttleValues[idx].brake;
    }

    /** Sets the brake flag at idx; out-of-range indices are ignored. */
    private void editSetBrake(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].brake = true;
    }

    /** Clears the brake flag at idx; out-of-range indices are ignored. */
    private void editClearBrake(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].brake = false;
    }

    /** Increases the throttle at idx by editThrottleChange, capped at 1. */
    private void editIncreaseThrottle(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].throttle = min(throttleValues[idx].throttle + editThrottleChange, 1);
    }

    /** Decreases the throttle at idx by editThrottleChange, floored at 0. */
    private void editDecreaseThrottle(int idx) {
        if (idx < 0 || idx >= numPoints) {
            return;
        }
        throttleValues[idx].throttle = max(throttleValues[idx].throttle - editThrottleChange, 0);
    }

    /** Initializes the profile from the track model: smooths |curvature| with a
     * short box filter, then maps the normalized smoothed values to throttle via
     * a power law. NOTE(review): throttle grows with the curvature value as
     * returned by getCurvatureAtPoints() — this assumes larger values mean
     * faster (radius-like) sections; confirm against the track model. */
    private void guessThrottleFromTrackModel() {
        if (getTrack() == null) {
            log.warning("null track");
            return;
        }
        getTrack().updateCurvature();
        float[] curvatures = getTrack().getCurvatureAtPoints();
        for (int i = 0; i < curvatures.length; i++) {
            curvatures[i] = (float) Math.abs(curvatures[i]);
        }
        final int nfilt = numPoints / 30; // box-filter length
        float[] smoothed = new float[curvatures.length];
        for (int i = nfilt - 1; i < curvatures.length; i++) {
            float s = 0;
            for (int j = 0; j < nfilt; j++) {
                s += curvatures[i - j];
            }
            s /= nfilt;
            smoothed[i] = s;
        }
        for (int i = 0; i < nfilt - 1; i++) {
            smoothed[i] = curvatures[i]; // TODO no filter here yet
        }
        float minCurv = Float.MAX_VALUE;
        for (float c : smoothed) {
            if (c < minCurv) {
                minCurv = c;
            }
        }
        float maxCurv = Float.MIN_VALUE;
        for (float c : smoothed) {
            if (c > maxCurv) {
                maxCurv = c;
            }
        }
        for (int idx = 0; idx < numPoints; idx++) {
            int shiftedIdx = idx - nfilt; // compensate the box filter's group delay
            if (shiftedIdx < 0) {
                shiftedIdx = numPoints + shiftedIdx; // wrap around the closed track
            }
            throttleValues[shiftedIdx].throttle = min(1, startingThrottleValue * 2 * (float) Math.pow((smoothed[idx] / maxCurv), .15));
        }
    }
} // ThrottleProfile
/** Returns the smaller of a and b; returns b when a is not less than b (preserves original NaN/zero-sign behavior). */
private float min(float a, float b) {
    if (a < b) {
        return a;
    }
    return b;
}
/** Returns the larger of a and b; returns b when a is not greater than b (preserves original NaN/zero-sign behavior). */
private float max(float a, float b) {
    if (a > b) {
        return a;
    }
    return b;
}
// mouse control of throttle throttleValues
/** Attaches/detaches the mouse listeners used for profile editing when this filter is (de)selected in the GUI. */
@Override
public void setSelected(boolean yes) {
    super.setSelected(yes);
    if (glCanvas == null) {
        return;
    }
    if (yes) {
        glCanvas.addMouseListener(this);
        glCanvas.addMouseMotionListener(this);
    } else {
        glCanvas.removeMouseListener(this);
        glCanvas.removeMouseMotionListener(this);
    }
}
// MouseListener methods below are intentional no-ops; all interaction happens in mouseDragged/mouseMoved.
@Override
public void mouseClicked(MouseEvent e) {
}

@Override
public void mousePressed(MouseEvent e) {
}

@Override
public void mouseReleased(MouseEvent e) {
}

@Override
public void mouseEntered(MouseEvent e) {
}

@Override
public void mouseExited(MouseEvent e) {
}
// helpers
/** Maps a mouse event to a chip pixel, or null when no canvas exists or the pointer is outside the chip bounds. */
private Point getPixel(MouseEvent e) {
    if (canvas == null) {
        return null;
    }
    Point p = canvas.getPixelFromMouseEvent(e);
    return canvas.wasMousePixelInsideChipBounds() ? p : null;
}
/** True iff only Shift (not Ctrl, not Alt) is held during the event. */
private boolean isShift(MouseEvent e) {
    return e.isShiftDown() && !e.isControlDown() && !e.isAltDown();
}
/** True iff only Ctrl (not Shift, not Alt) is held during the event. */
private boolean isControl(MouseEvent e) {
    return !e.isShiftDown() && e.isControlDown() && !e.isAltDown();
}
/** Maps a mouse event to the closest track spline-point index, or -1 when there is no track model or no valid pixel. */
private int getIndex(MouseEvent e) {
    if (getTrack() == null) {
        log.warning("null track model");
        return -1;
    }
    Point p = getPixel(e);
    if (p == null) {
        return -1;
    }
    return getTrack().findClosestIndex(p, 0, true);
}
private int lastEditIdx = -1;
/** Mouse-edit mode for throttle painting. NOTE(review): "Increae" is a typo for
 * "Increase", but the constant is referenced throughout this file, so renaming
 * would need a coordinated change. */
enum EditState {
    Increae, Decrease, None
};
volatile EditState editState = EditState.None; // written by AWT mouse events, read by the GL render thread
/** Paints the throttle profile with the mouse: Alt+Shift clears a brake point,
 * Alt alone sets a brake point, Shift increases throttle, Ctrl decreases throttle.
 * Each edit is applied once per visited profile index to avoid repeats. */
@Override
public void mouseDragged(MouseEvent e) {
    if (currentProfile == null) {
        return;
    }
    int idx = -1;
    if ((idx = getIndex(e)) == -1) {
        return;
    }
    if (idx != lastEditIdx) { // only act when the drag reaches a new profile point
        if (e.isAltDown() && e.isShiftDown()) {
            // brake point
            currentProfile.editClearBrake(idx);
            editState = EditState.None;
            glCanvas.repaint();
        } else if (e.isAltDown() && !e.isShiftDown()) {
            // brake point
            currentProfile.editSetBrake(idx);
            editState = EditState.None;
            glCanvas.repaint();
        } else if (isShift(e)) {
            currentProfile.editIncreaseThrottle(idx);
            editState = EditState.Increae;
            glCanvas.repaint();
        } else if (isControl(e)) {
            currentProfile.editDecreaseThrottle(idx);
            editState = EditState.Decrease;
            glCanvas.repaint();
        } else {
            editState = EditState.None;
        }
    }
    lastEditIdx = idx;
}
/** Tracks the modifier keys while the mouse moves so the paint cursor shows the pending edit mode. */
@Override
public void mouseMoved(MouseEvent e) {
    editState = isShift(e) ? EditState.Increae
            : isControl(e) ? EditState.Decrease
            : EditState.None;
}
private boolean hasBlendChecked = false; // set true after the one-time GL blend-extension check
private boolean hasBlend = false; // whether GL_EXT_blend_color blending is available/usable
//    GLUT glut=new GLUT();
/** Draws the circular mouse "paint brush" cursor for profile editing, colored by
 * the current edit mode (dark yellow = none, green = increase, red = decrease),
 * when this filter is selected and a track/profile exists. */
private void drawThrottlePainter(GLAutoDrawable drawable) {
    if (isSelected() && getTrack() != null && getTrack().getPointList() != null && currentProfile != null) {
        Point mp = glCanvas.getMousePosition(); // NOTE(review): may be null when pointer is outside the canvas — confirm getPixelFromPoint tolerates null
        Point p = canvas.getPixelFromPoint(mp);
        if (p == null) {
            return;
        }
        GL gl = drawable.getGL();
        if (!hasBlendChecked) { // one-time probe for blending support
            hasBlendChecked = true;
            String glExt = gl.glGetString(GL.GL_EXTENSIONS);
            if (glExt.indexOf("GL_EXT_blend_color") != -1) {
                hasBlend = true;
            }
        }
        if (hasBlend) {
            try {
                gl.glEnable(GL.GL_BLEND);
                gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
                gl.glBlendEquation(GL.GL_FUNC_ADD);
            } catch (GLException e) {
                e.printStackTrace();
                hasBlend = false; // disable blending after a failure
            }
        }
        switch (editState) {
            case None:
                gl.glColor4f(.25f, .25f, 0, .3f);
                break;
            case Increae:
                gl.glColor4f(0, .45f, 0, .5f);
                break;
            case Decrease:
                gl.glColor4f(.45f, .0f, 0, .5f);
        }
        gl.glPushMatrix();
        gl.glTranslatef(p.x, p.y, 0);
        if (quad == null) {
            quad = glu.gluNewQuadric(); // lazily create the disk quadric
        }
        glu.gluQuadricDrawStyle(quad, GLU.GLU_FILL);
        glu.gluDisk(quad, 0, 5, 32, 1);
        gl.glPopMatrix();
        chip.getCanvas().checkGLError(gl, glu, "in drawThrottlePainterk");
    }
}
}
| added space from crash to brake segments
git-svn-id: e3d3b427d532171a6bd7557d8a4952a393b554a2@2501 b7f4320f-462c-0410-a916-d9f35bb82d52
| src/ch/unizh/ini/jaer/projects/virtualslotcar/EvolutionaryThrottleController.java | added space from crash to brake segments |
|
Java | lgpl-2.1 | c4e1b4aeb4fff1a5b614bf4d3a6919b5de86e297 | 0 | rekii/toxiclibs,postspectacular/toxiclibs,rekii/toxiclibs,postspectacular/toxiclibs,ruby-processing/toxiclibs | /*
* __ .__ .__ ._____.
* _/ |_ _______ __|__| ____ | | |__\_ |__ ______
* \ __\/ _ \ \/ / |/ ___\| | | || __ \ / ___/
* | | ( <_> > <| \ \___| |_| || \_\ \\___ \
* |__| \____/__/\_ \__|\___ >____/__||___ /____ >
* \/ \/ \/ \/
*
* Copyright (c) 2006-2011 Karsten Schmidt
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* http://creativecommons.org/licenses/LGPL/2.1/
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
package toxi.geom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import toxi.geom.Line2D.LineIntersection;
import toxi.geom.Line2D.LineIntersection.Type;
import toxi.geom.mesh.Mesh3D;
import toxi.geom.mesh.TriangleMesh;
import toxi.math.MathUtils;
/**
* Container type for convex polygons. Implements {@link Shape2D}.
*/
public class Polygon2D implements Shape2D {
public List<Vec2D> vertices = new ArrayList<Vec2D>();
/** Constructs an empty polygon; add vertices via {@link #add(Vec2D)}. */
public Polygon2D() {
}
public Polygon2D(List<Vec2D> points) {
for (Vec2D p : points) {
add(p.copy());
}
}
public Polygon2D(Vec2D... points) {
for (Vec2D p : points) {
add(p.copy());
}
}
/**
* Adds a new vertex to the polygon (builder pattern).
*
* @param p
* vertex point to add
* @return itself
*/
public Polygon2D add(Vec2D p) {
if (!vertices.contains(p)) {
vertices.add(p);
}
return this;
}
public boolean containsPoint(ReadonlyVec2D p) {
int num = vertices.size();
int i, j = num - 1;
boolean oddNodes = false;
float px = p.x();
float py = p.y();
for (i = 0; i < num; i++) {
Vec2D vi = vertices.get(i);
Vec2D vj = vertices.get(j);
if (vi.y < py && vj.y >= py || vj.y < py && vi.y >= py) {
if (vi.x + (py - vi.y) / (vj.y - vi.y) * (vj.x - vi.x) < px) {
oddNodes = !oddNodes;
}
}
j = i;
}
return oddNodes;
}
public Polygon2D copy() {
return new Polygon2D(vertices);
}
/**
* Flips the ordering of the polygon's vertices.
*
* @return itself
*/
public Polygon2D flipVertexOrder() {
Collections.reverse(vertices);
return this;
}
/**
* Computes the area of the polygon, provided it isn't self intersecting.
* Code ported from:
* http://local.wasp.uwa.edu.au/~pbourke/geometry/polyarea/
*
* @return polygon area
*/
public float getArea() {
float area = 0;
int numPoints = vertices.size();
for (int i = 0; i < numPoints; i++) {
Vec2D a = vertices.get(i);
Vec2D b = vertices.get((i + 1) % numPoints);
area += a.x * b.y;
area -= a.y * b.x;
}
area *= 0.5f;
return area;
}
/**
* Computes the polygon's centre of mass. Code ported from:
* http://local.wasp.uwa.edu.au/~pbourke/geometry/polyarea/
*
* @return centroid point
*/
public Vec2D getCentroid() {
Vec2D res = new Vec2D();
int numPoints = vertices.size();
for (int i = 0; i < numPoints; i++) {
Vec2D a = vertices.get(i);
Vec2D b = vertices.get((i + 1) % numPoints);
float factor = a.x * b.y - b.x * a.y;
res.x += (a.x + b.x) * factor;
res.y += (a.y + b.y) * factor;
}
return res.scale(1f / (getArea() * 6));
}
public float getCircumference() {
float circ = 0;
for (int i = 0, num = vertices.size(); i < num; i++) {
circ += vertices.get(i).distanceTo(vertices.get((i + 1) % num));
}
return circ;
}
/**
* Returns the number of polygon vertices.
*
* @return vertex count
*/
public int getNumPoints() {
return vertices.size();
}
/**
* Checks if the vertices of this polygon are in clockwise ordering by
* examining all vertices as a sequence of triangles. Only works if triangle
* is convex.
*
* @return true, if clockwise
*/
public boolean isClockwise() {
boolean isClockwise = true;
final int num = vertices.size() - 2;
if (num > 0) {
for (int i = 0; i < num; i++) {
if (!Triangle2D.isClockwise(vertices.get(i),
vertices.get(i + 1), vertices.get(i + 2))) {
isClockwise = false;
break;
}
}
} else {
isClockwise = false;
}
return isClockwise;
}
/**
* Checks if the polygon is convex.
*
* @return true, if convex.
*/
public boolean isConvex() {
boolean isPositive = false;
int num = vertices.size();
for (int i = 0; i < num; i++) {
int prev = (i == 0) ? num - 1 : i - 1;
int next = (i == num - 1) ? 0 : i + 1;
Vec2D d0 = vertices.get(i).sub(vertices.get(prev));
Vec2D d1 = vertices.get(next).sub(vertices.get(i));
boolean newIsP = (d0.cross(d1) > 0);
if (i == 0) {
isPositive = newIsP;
} else if (isPositive != newIsP) {
return false;
}
}
return true;
}
/**
* Given the sequentially connected points p1, p2, p3, this function returns
* a bevel-offset replacement for point p2.
*
* Note: If vectors p1->p2 and p2->p3 are exactly 180 degrees opposed, or if
* either segment is zero then no offset will be applied.
*
* @param x1
* @param y1
* @param x2
* @param y2
* @param x3
* @param y3
* @param distance
* @param out
*
* @see http://alienryderflex.com/polygon_inset/
*/
protected void offsetCorner(float x1, float y1, float x2, float y2,
float x3, float y3, float distance, Vec2D out) {
float c1 = x2, d1 = y2, c2 = x2, d2 = y2;
float dx1, dy1, dist1, dx2, dy2, dist2, insetX, insetY;
dx1 = x2 - x1;
dy1 = y2 - y1;
dist1 = (float) Math.sqrt(dx1 * dx1 + dy1 * dy1);
dx2 = x3 - x2;
dy2 = y3 - y2;
dist2 = (float) Math.sqrt(dx2 * dx2 + dy2 * dy2);
if (dist1 < MathUtils.EPS || dist2 < MathUtils.EPS) {
return;
}
dist1 = distance / dist1;
dist2 = distance / dist2;
insetX = dy1 * dist1;
insetY = -dx1 * dist1;
x1 += insetX;
c1 += insetX;
y1 += insetY;
d1 += insetY;
insetX = dy2 * dist2;
insetY = -dx2 * dist2;
x3 += insetX;
c2 += insetX;
y3 += insetY;
d2 += insetY;
if (c1 == c2 && d1 == d2) {
out.set(c1, d1);
return;
}
Line2D l1 = new Line2D(new Vec2D(x1, y1), new Vec2D(c1, d1));
Line2D l2 = new Line2D(new Vec2D(c2, d2), new Vec2D(x3, y3));
LineIntersection isec = l1.intersectLine(l2);
final Vec2D ipos = isec.getPos();
if (ipos != null) {
out.set(ipos);
}
}
/**
* Moves each line segment of the polygon in/outward perpendicular by the
* given distance. New line segments and polygon vertices are created by
* computing the intersection points of the displaced segments. Choosing an
* too large displacement amount will result in deformation/undefined
* behavior with various self intersections. Should that happen, please try
* to clean up the shape using the {@link #toOutline()} method.
*
* @param distance
* offset/inset distance (negative for inset)
* @return itself
*/
public Polygon2D offsetShape(float distance) {
int corners = vertices.size();
if (corners > 2) {
int i;
float a, b, c, d, e, f;
float startX = vertices.get(0).x;
float startY = vertices.get(0).y;
c = vertices.get(corners - 1).x;
d = vertices.get(corners - 1).y;
e = startX;
f = startY;
for (i = 0; i < corners - 1; i++) {
a = c;
b = d;
c = e;
d = f;
e = vertices.get(i + 1).x;
f = vertices.get(i + 1).y;
offsetCorner(a, b, c, d, e, f, distance, vertices.get(i));
}
offsetCorner(c, d, e, f, startX, startY, distance, vertices.get(i));
}
return this;
}
public Polygon2D setNumVertices(int max) {
int num = vertices.size();
if (num < max) {
while (num < max) {
// find longest edge
int longestID = 0;
float maxD = 0;
for (int i = 0; i < num; i++) {
float d =
vertices.get(i).distanceToSquared(
vertices.get((i + 1) % num));
if (d > maxD) {
longestID = i;
maxD = d;
}
}
// insert mid point of longest segment in vertex list
Vec2D m =
vertices.get(longestID)
.add(vertices.get((longestID + 1) % num))
.scaleSelf(0.5f);
vertices.add(longestID + 1, m);
num++;
}
} else {
// TODO only works for increasing vert count so far, need to handle
// reduction too
throw new UnsupportedOperationException(
"Polygon vertex reduction not yet implemented.");
}
return this;
}
/**
* Applies a laplacian-style smooth operation to all polygon vertices,
* causing sharp corners/angles to widen and results in a general smoother
* shape. Let the current vertex be A and its neighbours P and Q, then A
* will be moved by a specified amount into the direction given by
* (P-A)+(Q-A). Additionally, and to avoid shrinking of the shape through
* repeated iteration of this procedure, the vector A - C (Polygon centroid)
* is added as counter force and a weight for its impact can be specified.
* To keep the average size of the polygon stable, this weight value should
* be ~1/2 of the smooth amount.
*
* @param amount
* smooth amount (between 0 < x < 0.5)
* @param baseWeight
* counter weight (0 <= x < 1/2 * smooth amount)
* @return itself
*/
public Polygon2D smooth(float amount, float baseWeight) {
Vec2D centroid = getCentroid();
int num = vertices.size();
List<Vec2D> filtered = new ArrayList<Vec2D>(num);
for (int i = 0, j = num - 1, k = 1; i < num; i++) {
Vec2D a = vertices.get(i);
Vec2D dir =
vertices.get(j).sub(a).addSelf(vertices.get(k).sub(a))
.addSelf(a.sub(centroid).scaleSelf(baseWeight));
filtered.add(a.add(dir.scaleSelf(amount)));
j++;
if (j == num) {
j = 0;
}
k++;
if (k == num) {
k = 0;
}
}
vertices.clear();
vertices.addAll(filtered);
return this;
}
public Mesh3D toMesh(Mesh3D mesh) {
if (mesh == null) {
mesh = new TriangleMesh();
}
final int num = vertices.size();
final Vec3D centroid = getCentroid().to3DXY();
for (int i = 1; i <= num; i++) {
mesh.addFace(centroid, vertices.get(i % num).to3DXY(), vertices
.get(i - 1).to3DXY());
}
return mesh;
}
/**
* Attempts to remove all internal self-intersections and creates a new
* polygon only consisting of perimeter vertices.
*
* @return true, if process completed succcessfully.
*
* @see http://alienryderflex.com/polygon_perimeter/
*/
public boolean toOutline() {
List<Vec2D> newVerts = new ArrayList<Vec2D>();
int corners = vertices.size();
int maxSegs = corners * 3;
Vec2D[] segments = new Vec2D[maxSegs];
Vec2D[] segEnds = new Vec2D[maxSegs];
float[] segAngles = new float[maxSegs];
Vec2D start = vertices.get(0).copy();
float lastAngle = MathUtils.PI;
float a, b, c, d, e, f, angleDif, bestAngleDif;
int i, j = corners - 1, segs = 0;
if (corners > maxSegs) {
return false;
}
// 1,3. Reformulate the polygon as a set of line segments, and choose a
// starting point that must be on the perimeter.
for (i = 0; i < corners; i++) {
Vec2D pi = vertices.get(i);
Vec2D pj = vertices.get(j);
if (!pi.equals(pj)) {
segments[segs] = pi;
segEnds[segs++] = pj;
}
j = i;
if (pi.y > start.y || (pi.y == start.y && pi.x < start.x)) {
start.set(pi);
}
}
if (segs == 0) {
return false;
}
// 2. Break the segments up at their intersection points.
for (i = 0; i < segs - 1; i++) {
for (j = i + 1; j < segs; j++) {
Line2D li = new Line2D(segments[i], segEnds[i]);
Line2D lj = new Line2D(segments[j], segEnds[j]);
LineIntersection isec = li.intersectLine(lj);
if (isec.getType() == Type.INTERSECTING) {
Vec2D ipos = isec.getPos();
if (!ipos.equals(segments[i]) && !ipos.equals(segEnds[i])) {
if (segs == maxSegs) {
return false;
}
segments[segs] = segments[i].copy();
segEnds[segs++] = ipos.copy();
segments[i] = ipos.copy();
}
if (!ipos.equals(segments[j]) && !ipos.equals(segEnds[j])) {
if (segs == maxSegs) {
return false;
}
segments[segs] = segments[j].copy();
segEnds[segs++] = ipos.copy();
segments[j] = ipos.copy();
}
}
}
}
// Calculate the angle of each segment.
for (i = 0; i < segs; i++) {
segAngles[i] = segEnds[i].sub(segments[i]).positiveHeading();
}
// 4. Build the perimeter polygon.
c = start.x;
d = start.y;
a = c - 1;
b = d;
e = 0;
f = 0;
newVerts.add(new Vec2D(c, d));
corners = 1;
while (true) {
bestAngleDif = MathUtils.TWO_PI;
for (i = 0; i < segs; i++) {
if (segments[i].x == c && segments[i].y == d
&& (segEnds[i].x != a || segEnds[i].y != b)) {
angleDif = lastAngle - segAngles[i];
while (angleDif >= MathUtils.TWO_PI) {
angleDif -= MathUtils.TWO_PI;
}
while (angleDif < 0) {
angleDif += MathUtils.TWO_PI;
}
if (angleDif < bestAngleDif) {
bestAngleDif = angleDif;
e = segEnds[i].x;
f = segEnds[i].y;
}
}
if (segEnds[i].x == c && segEnds[i].y == d
&& (segments[i].x != a || segments[i].y != b)) {
angleDif = lastAngle - segAngles[i] + MathUtils.PI;
while (angleDif >= MathUtils.TWO_PI) {
angleDif -= MathUtils.TWO_PI;
}
while (angleDif < 0) {
angleDif += MathUtils.TWO_PI;
}
if (angleDif < bestAngleDif) {
bestAngleDif = angleDif;
e = segments[i].x;
f = segments[i].y;
}
}
}
if (corners > 1 && c == newVerts.get(0).x && d == newVerts.get(0).y
&& e == newVerts.get(1).x && f == newVerts.get(1).y) {
corners--;
vertices = newVerts;
return true;
}
if (bestAngleDif == MathUtils.TWO_PI || corners == maxSegs) {
return false;
}
lastAngle -= bestAngleDif + MathUtils.PI;
newVerts.add(new Vec2D(e, f));
corners++;
a = c;
b = d;
c = e;
d = f;
}
}
public String toString() {
StringBuilder buf = new StringBuilder();
for (Iterator<Vec2D> i = vertices.iterator(); i.hasNext();) {
buf.append(i.next().toString());
if (i.hasNext()) {
buf.append(", ");
}
}
return buf.toString();
}
}
| src.core/toxi/geom/Polygon2D.java | /*
* Copyright (c) 2006-2011 Karsten Schmidt
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* http://creativecommons.org/licenses/LGPL/2.1/
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
package toxi.geom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import toxi.geom.Line2D.LineIntersection;
import toxi.geom.Line2D.LineIntersection.Type;
import toxi.geom.mesh.Mesh3D;
import toxi.geom.mesh.TriangleMesh;
import toxi.math.MathUtils;
/**
* Container type for convex polygons. Implements {@link Shape2D}.
*/
public class Polygon2D implements Shape2D {
public List<Vec2D> vertices = new ArrayList<Vec2D>();
public Polygon2D() {
}
public Polygon2D(List<Vec2D> points) {
for (Vec2D p : points) {
add(p.copy());
}
}
public Polygon2D(Vec2D... points) {
for (Vec2D p : points) {
add(p.copy());
}
}
/**
* Adds a new vertex to the polygon (builder pattern).
*
* @param p
* vertex point to add
* @return itself
*/
public Polygon2D add(Vec2D p) {
if (!vertices.contains(p)) {
vertices.add(p);
}
return this;
}
public boolean containsPoint(ReadonlyVec2D p) {
int num = vertices.size();
int i, j = num - 1;
boolean oddNodes = false;
float px = p.x();
float py = p.y();
for (i = 0; i < num; i++) {
Vec2D vi = vertices.get(i);
Vec2D vj = vertices.get(j);
if (vi.y < py && vj.y >= py || vj.y < py && vi.y >= py) {
if (vi.x + (py - vi.y) / (vj.y - vi.y) * (vj.x - vi.x) < px) {
oddNodes = !oddNodes;
}
}
j = i;
}
return oddNodes;
}
/**
* Flips the ordering of the polygon's vertices.
*
* @return itself
*/
public Polygon2D flipVertexOrder() {
Collections.reverse(vertices);
return this;
}
/**
* Computes the area of the polygon, provided it isn't self intersecting.
* Code ported from:
* http://local.wasp.uwa.edu.au/~pbourke/geometry/polyarea/
*
* @return polygon area
*/
public float getArea() {
float area = 0;
int numPoints = vertices.size();
for (int i = 0; i < numPoints; i++) {
Vec2D a = vertices.get(i);
Vec2D b = vertices.get((i + 1) % numPoints);
area += a.x * b.y;
area -= a.y * b.x;
}
area *= 0.5f;
return area;
}
/**
* Computes the polygon's centre of mass. Code ported from:
* http://local.wasp.uwa.edu.au/~pbourke/geometry/polyarea/
*
* @return centroid point
*/
public Vec2D getCentroid() {
Vec2D res = new Vec2D();
int numPoints = vertices.size();
for (int i = 0; i < numPoints; i++) {
Vec2D a = vertices.get(i);
Vec2D b = vertices.get((i + 1) % numPoints);
float factor = a.x * b.y - b.x * a.y;
res.x += (a.x + b.x) * factor;
res.y += (a.y + b.y) * factor;
}
return res.scale(1f / (getArea() * 6));
}
public float getCircumference() {
float circ = 0;
for (int i = 0, num = vertices.size(); i < num; i++) {
circ += vertices.get(i).distanceTo(vertices.get((i + 1) % num));
}
return circ;
}
/**
* Returns the number of polygon vertices.
*
* @return vertex count
*/
public int getNumPoints() {
return vertices.size();
}
/**
* Checks if the vertices of this polygon are in clockwise ordering by
* examining the first 3.
*
* @return true, if clockwise
*/
public boolean isClockwise() {
if (vertices.size() > 2) {
return Triangle2D.isClockwise(vertices.get(0), vertices.get(1),
vertices.get(2));
}
return false;
}
/**
* Given the sequentially connected points p1, p2, p3, this function returns
* a bevel-offset replacement for point p2.
*
* Note: If vectors p1->p2 and p2->p3 are exactly 180 degrees opposed, or if
* either segment is zero then no offset will be applied.
*
* @param x1
* @param y1
* @param x2
* @param y2
* @param x3
* @param y3
* @param distance
* @param out
*
* @see http://alienryderflex.com/polygon_inset/
*/
protected void offsetCorner(float x1, float y1, float x2, float y2,
float x3, float y3, float distance, Vec2D out) {
float c1 = x2, d1 = y2, c2 = x2, d2 = y2;
float dx1, dy1, dist1, dx2, dy2, dist2, insetX, insetY;
dx1 = x2 - x1;
dy1 = y2 - y1;
dist1 = (float) Math.sqrt(dx1 * dx1 + dy1 * dy1);
dx2 = x3 - x2;
dy2 = y3 - y2;
dist2 = (float) Math.sqrt(dx2 * dx2 + dy2 * dy2);
if (dist1 < MathUtils.EPS || dist2 < MathUtils.EPS) {
return;
}
dist1 = distance / dist1;
dist2 = distance / dist2;
insetX = dy1 * dist1;
insetY = -dx1 * dist1;
x1 += insetX;
c1 += insetX;
y1 += insetY;
d1 += insetY;
insetX = dy2 * dist2;
insetY = -dx2 * dist2;
x3 += insetX;
c2 += insetX;
y3 += insetY;
d2 += insetY;
if (c1 == c2 && d1 == d2) {
out.set(c1, d1);
return;
}
Line2D l1 = new Line2D(new Vec2D(x1, y1), new Vec2D(c1, d1));
Line2D l2 = new Line2D(new Vec2D(c2, d2), new Vec2D(x3, y3));
LineIntersection isec = l1.intersectLine(l2);
final Vec2D ipos = isec.getPos();
if (ipos != null) {
out.set(ipos);
}
}
/**
* Moves each line segment of the polygon in/outward perpendicular by the
* given distance. New line segments and polygon vertices are created by
* computing the intersection points of the displaced segments. Choosing an
* too large displacement amount will result in deformation/undefined
* behavior with various self intersections. Should that happen, please try
* to clean up the shape using the {@link #toOutline()} method.
*
* @param distance
* offset/inset distance (negative for inset)
* @return itself
*/
public Polygon2D offsetShape(float distance) {
int corners = vertices.size();
if (corners > 2) {
int i;
float a, b, c, d, e, f;
float startX = vertices.get(0).x;
float startY = vertices.get(0).y;
c = vertices.get(corners - 1).x;
d = vertices.get(corners - 1).y;
e = startX;
f = startY;
for (i = 0; i < corners - 1; i++) {
a = c;
b = d;
c = e;
d = f;
e = vertices.get(i + 1).x;
f = vertices.get(i + 1).y;
offsetCorner(a, b, c, d, e, f, distance, vertices.get(i));
}
offsetCorner(c, d, e, f, startX, startY, distance, vertices.get(i));
}
return this;
}
/**
* Applies a laplacian-style smooth operation to all polygon vertices,
* causing sharp corners/angles to widen and results in a general smoother
* shape. Let the current vertex be A and its neighbours P and Q, then A
* will be moved by a specified amount into the direction given by
* (P-A)+(Q-A). Additionally, and to avoid shrinking of the shape through
* repeated iteration of this procedure, the vector A - C (Polygon centroid)
* is added as counter force and a weight for its impact can be specified.
* To keep the average size of the polygon stable, this weight value should
* be ~1/2 of the smooth amount.
*
* @param amount
* smooth amount (between 0 < x < 0.5)
* @param baseWeight
* counter weight (0 <= x < 1/2 * smooth amount)
* @return itself
*/
public Polygon2D smooth(float amount, float baseWeight) {
Vec2D centroid = getCentroid();
int num = vertices.size();
List<Vec2D> filtered = new ArrayList<Vec2D>(num);
for (int i = 0, j = num - 1, k = 1; i < num; i++) {
Vec2D a = vertices.get(i);
Vec2D dir =
vertices.get(j).sub(a).addSelf(vertices.get(k).sub(a))
.addSelf(a.sub(centroid).scaleSelf(baseWeight));
filtered.add(a.add(dir.scaleSelf(amount)));
j++;
if (j == num) {
j = 0;
}
k++;
if (k == num) {
k = 0;
}
}
vertices.clear();
vertices.addAll(filtered);
return this;
}
public Mesh3D toMesh(Mesh3D mesh) {
if (mesh == null) {
mesh = new TriangleMesh();
}
final int num = vertices.size();
final Vec3D centroid = getCentroid().to3DXY();
for (int i = 1; i <= num; i++) {
mesh.addFace(centroid, vertices.get(i % num).to3DXY(), vertices
.get(i - 1).to3DXY());
}
return mesh;
}
/**
* Attempts to remove all internal self-intersections and creates a new
* polygon only consisting of perimeter vertices.
*
* @return true, if process completed succcessfully.
*
* @see http://alienryderflex.com/polygon_perimeter/
*/
public boolean toOutline() {
List<Vec2D> newVerts = new ArrayList<Vec2D>();
int corners = vertices.size();
int maxSegs = corners * 3;
Vec2D[] segments = new Vec2D[maxSegs];
Vec2D[] segEnds = new Vec2D[maxSegs];
float[] segAngles = new float[maxSegs];
Vec2D start = vertices.get(0).copy();
float lastAngle = MathUtils.PI;
float a, b, c, d, e, f, angleDif, bestAngleDif;
int i, j = corners - 1, segs = 0;
if (corners > maxSegs) {
return false;
}
// 1,3. Reformulate the polygon as a set of line segments, and choose a
// starting point that must be on the perimeter.
for (i = 0; i < corners; i++) {
Vec2D pi = vertices.get(i);
Vec2D pj = vertices.get(j);
if (!pi.equals(pj)) {
segments[segs] = pi;
segEnds[segs++] = pj;
}
j = i;
if (pi.y > start.y || (pi.y == start.y && pi.x < start.x)) {
start.set(pi);
}
}
if (segs == 0) {
return false;
}
// 2. Break the segments up at their intersection points.
for (i = 0; i < segs - 1; i++) {
for (j = i + 1; j < segs; j++) {
Line2D li = new Line2D(segments[i], segEnds[i]);
Line2D lj = new Line2D(segments[j], segEnds[j]);
LineIntersection isec = li.intersectLine(lj);
if (isec.getType() == Type.INTERSECTING) {
Vec2D ipos = isec.getPos();
if (!ipos.equals(segments[i]) && !ipos.equals(segEnds[i])) {
if (segs == maxSegs) {
return false;
}
segments[segs] = segments[i].copy();
segEnds[segs++] = ipos.copy();
segments[i] = ipos.copy();
}
if (!ipos.equals(segments[j]) && !ipos.equals(segEnds[j])) {
if (segs == maxSegs) {
return false;
}
segments[segs] = segments[j].copy();
segEnds[segs++] = ipos.copy();
segments[j] = ipos.copy();
}
}
}
}
// Calculate the angle of each segment.
for (i = 0; i < segs; i++) {
segAngles[i] = segEnds[i].sub(segments[i]).positiveHeading();
}
// 4. Build the perimeter polygon.
c = start.x;
d = start.y;
a = c - 1;
b = d;
e = 0;
f = 0;
newVerts.add(new Vec2D(c, d));
corners = 1;
while (true) {
bestAngleDif = MathUtils.TWO_PI;
for (i = 0; i < segs; i++) {
if (segments[i].x == c && segments[i].y == d
&& (segEnds[i].x != a || segEnds[i].y != b)) {
angleDif = lastAngle - segAngles[i];
while (angleDif >= MathUtils.TWO_PI) {
angleDif -= MathUtils.TWO_PI;
}
while (angleDif < 0) {
angleDif += MathUtils.TWO_PI;
}
if (angleDif < bestAngleDif) {
bestAngleDif = angleDif;
e = segEnds[i].x;
f = segEnds[i].y;
}
}
if (segEnds[i].x == c && segEnds[i].y == d
&& (segments[i].x != a || segments[i].y != b)) {
angleDif = lastAngle - segAngles[i] + MathUtils.PI;
while (angleDif >= MathUtils.TWO_PI) {
angleDif -= MathUtils.TWO_PI;
}
while (angleDif < 0) {
angleDif += MathUtils.TWO_PI;
}
if (angleDif < bestAngleDif) {
bestAngleDif = angleDif;
e = segments[i].x;
f = segments[i].y;
}
}
}
if (corners > 1 && c == newVerts.get(0).x && d == newVerts.get(0).y
&& e == newVerts.get(1).x && f == newVerts.get(1).y) {
corners--;
vertices = newVerts;
return true;
}
if (bestAngleDif == MathUtils.TWO_PI || corners == maxSegs) {
return false;
}
lastAngle -= bestAngleDif + MathUtils.PI;
newVerts.add(new Vec2D(e, f));
corners++;
a = c;
b = d;
c = e;
d = f;
}
}
public String toString() {
StringBuilder buf = new StringBuilder();
for (Iterator<Vec2D> i = vertices.iterator(); i.hasNext();) {
buf.append(i.next().toString());
if (i.hasNext()) {
buf.append(", ");
}
}
return buf.toString();
}
}
| adding Polygon2D.copy(), isConvex(), adding setNumVertices() to insert vertices until the requested number is reached (currently only supports increase, not reduction), bugfixing/improving isClockwise() to check all polygon vertices
--HG--
branch : toxiclibs
| src.core/toxi/geom/Polygon2D.java | adding Polygon2D.copy(), isConvex(), adding setNumVertices() to insert vertices until the requested number is reached (currently only supports increase, not reduction), bugfixing/improving isClockwise() to check all polygon vertices |
|
Java | lgpl-2.1 | fb59461739f8d7a1875e124c7c8da914e0eeedd6 | 0 | GNOME/java-atk-wrapper,GNOME/java-atk-wrapper,GNOME/java-atk-wrapper | /*
* Java ATK Wrapper for GNOME
* Copyright (C) 2009 Sun Microsystems Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.GNOME.Accessibility;
import javax.accessibility.*;
import java.lang.ref.WeakReference;
public class AtkSelection {
WeakReference<AccessibleContext> _ac;
WeakReference<AccessibleSelection> _acc_selection;
public AtkSelection (AccessibleContext ac) {
super();
this._ac = new WeakReference<AccessibleContext>(ac);
this._acc_selection = new WeakReference<AccessibleSelection>(ac.getAccessibleSelection());
}
public static AtkSelection createAtkSelection(AccessibleContext ac){
return AtkUtil.invokeInSwing ( () -> { return new AtkSelection(ac); }, null);
}
public boolean add_selection (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
return AtkUtil.invokeInSwing( () -> {
acc_selection.addAccessibleSelection(i);
return is_child_selected(i);
}, false);
}
public boolean clear_selection () {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
AtkUtil.invokeInSwing( () -> { acc_selection.clearAccessibleSelection(); });
return true;
}
public AccessibleContext ref_selection (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return null;
return AtkUtil.invokeInSwing ( () -> {
Accessible sel = acc_selection.getAccessibleSelection(i);
if (sel == null)
return null;
return sel.getAccessibleContext();
}, null);
}
public int get_selection_count () {
AccessibleContext ac = _ac.get();
if (ac == null)
return 0;
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return 0;
return AtkUtil.invokeInSwing ( () -> {
int count = 0;
for(int i = 0; i < ac.getAccessibleChildrenCount(); i++) {
if (acc_selection.isAccessibleChildSelected(i))
count++;
}
return count;
}, 0);
//A bug in AccessibleJMenu??
//return acc_selection.getAccessibleSelectionCount();
}
public boolean is_child_selected (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
return AtkUtil.invokeInSwing ( () -> { return acc_selection.isAccessibleChildSelected(i); }, false);
}
public boolean remove_selection (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
return AtkUtil.invokeInSwing( () -> {
acc_selection.removeAccessibleSelection(i);
return !is_child_selected(i);
}, false);
}
public boolean select_all_selection () {
AccessibleContext ac = _ac.get();
if (ac == null)
return false;
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
AccessibleStateSet stateSet = ac.getAccessibleStateSet();
return AtkUtil.invokeInSwing ( () -> {
if (stateSet.contains(AccessibleState.MULTISELECTABLE)) {
acc_selection.selectAllAccessibleSelection();
return true;
}
return false;
}, false);
}
}
| wrapper/org/GNOME/Accessibility/AtkSelection.java | /*
* Java ATK Wrapper for GNOME
* Copyright (C) 2009 Sun Microsystems Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.GNOME.Accessibility;
import javax.accessibility.*;
import java.lang.ref.WeakReference;
public class AtkSelection {
WeakReference<AccessibleContext> _ac;
WeakReference<AccessibleSelection> _acc_selection;
public AtkSelection (AccessibleContext ac) {
super();
this._ac = new WeakReference<AccessibleContext>(ac);
this._acc_selection = new WeakReference<AccessibleSelection>(ac.getAccessibleSelection());
}
public static AtkSelection createAtkSelection(AccessibleContext ac){
return AtkUtil.invokeInSwing ( () -> { return new AtkSelection(ac); }, null);
}
public boolean add_selection (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
return AtkUtil.invokeInSwing( () -> {
acc_selection.addAccessibleSelection(i);
return is_child_selected(i);
}, false);
}
public boolean clear_selection () {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
AtkUtil.invokeInSwing( () -> { acc_selection.clearAccessibleSelection(); });
return true;
}
public AccessibleContext ref_selection (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return null;
return AtkUtil.invokeInSwing ( () -> { return acc_selection.getAccessibleSelection(i).getAccessibleContext(); }, null);
}
public int get_selection_count () {
AccessibleContext ac = _ac.get();
if (ac == null)
return 0;
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return 0;
return AtkUtil.invokeInSwing ( () -> {
int count = 0;
for(int i = 0; i < ac.getAccessibleChildrenCount(); i++) {
if (acc_selection.isAccessibleChildSelected(i))
count++;
}
return count;
}, 0);
//A bug in AccessibleJMenu??
//return acc_selection.getAccessibleSelectionCount();
}
public boolean is_child_selected (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
return AtkUtil.invokeInSwing ( () -> { return acc_selection.isAccessibleChildSelected(i); }, false);
}
public boolean remove_selection (int i) {
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
return AtkUtil.invokeInSwing( () -> {
acc_selection.removeAccessibleSelection(i);
return !is_child_selected(i);
}, false);
}
public boolean select_all_selection () {
AccessibleContext ac = _ac.get();
if (ac == null)
return false;
AccessibleSelection acc_selection = _acc_selection.get();
if (acc_selection == null)
return false;
AccessibleStateSet stateSet = ac.getAccessibleStateSet();
return AtkUtil.invokeInSwing ( () -> {
if (stateSet.contains(AccessibleState.MULTISELECTABLE)) {
acc_selection.selectAllAccessibleSelection();
return true;
}
return false;
}, false);
}
}
| Fix null dereference
| wrapper/org/GNOME/Accessibility/AtkSelection.java | Fix null dereference |
|
Java | apache-2.0 | b5b4100787a4fc5a34d4969f363ddf8ddb8aee08 | 0 | CloudScale-Project/DynamicSpotter,sopeco/DynamicSpotter | /**
* Copyright 2014 SAP AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spotter.core.detection;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import org.aim.api.exceptions.InstrumentationException;
import org.aim.api.exceptions.MeasurementException;
import org.aim.api.instrumentation.description.InstrumentationDescription;
import org.aim.api.instrumentation.description.InstrumentationDescriptionBuilder;
import org.aim.api.measurement.dataset.DatasetCollection;
import org.aim.api.measurement.dataset.Parameter;
import org.aim.api.measurement.utils.RecordCSVReader;
import org.aim.api.measurement.utils.RecordCSVWriter;
import org.lpe.common.config.GlobalConfiguration;
import org.lpe.common.extension.AbstractExtensionArtifact;
import org.lpe.common.extension.IExtension;
import org.lpe.common.util.LpeFileUtils;
import org.lpe.common.util.LpeStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spotter.core.Spotter;
import org.spotter.core.config.interpretation.MeasurementEnvironmentFactory;
import org.spotter.core.instrumentation.ISpotterInstrumentation;
import org.spotter.core.instrumentation.InstrumentationBroker;
import org.spotter.core.measurement.IMeasurementController;
import org.spotter.core.measurement.MeasurementBroker;
import org.spotter.core.workload.IWorkloadAdapter;
import org.spotter.core.workload.WorkloadAdapterBroker;
import org.spotter.exceptions.WorkloadException;
import org.spotter.shared.configuration.ConfigKeys;
import org.spotter.shared.result.ResultsLocationConstants;
import org.spotter.shared.result.model.SpotterResult;
import org.spotter.shared.status.DiagnosisStatus;
import com.xeiam.xchart.BitmapEncoder;
import com.xeiam.xchart.Chart;
/**
* The {@link AbstractDetectionController} comprises common functionality of all
* detection controller classes, like initialization, result persistance, etc.
*
* @author Alexander Wert
*
*/
public abstract class AbstractDetectionController extends AbstractExtensionArtifact implements IDetectionController {
	// tolerance added to the upper loop bound when stepping through fractional user counts
	private static final double EPSELON = 0.5;
	private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDetectionController.class);
	// milliseconds per second; used to convert timestamps to seconds
	public static final long KILO = 1000L;
	/**
	 * property key for detection name.
	 */
	public static final String DETECTABLE_KEY = "org.spotter.detection.detectable";
	// duration [s] of the initial warm-up phase of the system under test (configurable)
	private static final int SUT_WARMPUP_DURATION = GlobalConfiguration.getInstance().getPropertyAsInteger(ConfigKeys.PREWARUMUP_DURATION, 180);
	// lower bound of the user count for experiment series
	private static final int MIN_NUM_USERS = 1;
	protected static final String NUMBER_OF_USERS = "numUsers";
	protected static final String EXPERIMENT_STEPS_KEY = "numExperimentSteps";
	// resolution for exported chart images
	private static final int DPI = 300;
	private ISpotterInstrumentation instrumentationController;
	protected IMeasurementController measurementController;
	protected IWorkloadAdapter workloadAdapter;
	// true while the application is instrumented; used for clean-up on errors
	protected boolean instrumented = false;
	private boolean sutWarmedUp = false;
	private Properties problemDetectionConfiguration = new Properties();
	// running number of executed experiments; also names the per-experiment data dirs
	private int experimentCount = 0;
	// directory containing the measurement data (lazily derived or explicitly overwritten)
	private String dataPath;
	// directory for additional result resources (charts etc.), lazily created
	private String resourcePath;
	private List<IExperimentReuser> experimentReuser;
	private String parentDataDir;
	protected long measurementRampUpTime;
	// estimated experiment duration [s] plus bookkeeping overhead measured so far
	private long estimatedDuration = 0L;
	protected long additionalDuration = 0L;
	/**
	 * Constructor.
	 *
	 * @param provider
	 *            Provider of the extension.
	 */
	public AbstractDetectionController(IExtension<IDetectionController> provider) {
		super(provider);
		// start with an empty list; reusers register later via addExperimentReuser
		experimentReuser = new ArrayList<>();
	}
	/**
	 * Runs the detection workflow of this heuristic: duration estimation,
	 * environment initialization, optional SUT warm-up, experiment execution
	 * (or reuse of existing data) and finally analysis of the collected data.
	 *
	 * @return the analysis result; a non-detected result with an error message
	 *         is returned when any non-instrumentation error occurs
	 */
	@Override
	public SpotterResult analyzeProblem() throws InstrumentationException, MeasurementException, WorkloadException {
		try {
			// warm-up time counts towards the estimated duration unless omitted
			if (!GlobalConfiguration.getInstance().getPropertyAsBoolean(ConfigKeys.OMIT_WARMUP, false)) {
				additionalDuration += SUT_WARMPUP_DURATION;
			}
			calculateInitialEstimatedDuration();
			Spotter.getInstance().getProgress()
					.updateProgressStatus(getProvider().getName(), DiagnosisStatus.INITIALIZING);
			if (GlobalConfiguration.getInstance().getPropertyAsBoolean(ConfigKeys.OMIT_EXPERIMENTS, false)) {
				// analyze previously recorded dummy data instead of running experiments
				overwriteDataPath(GlobalConfiguration.getInstance().getProperty(ConfigKeys.DUMMY_EXPERIMENT_DATA));
			} else if (this instanceof IExperimentReuser) {
				// piggyback on the measurement data of the parent controller
				overwriteDataPath(parentDataDir);
			} else {
				// run an own experiment series: set up environment, warm up, execute
				initialize();
				if (!GlobalConfiguration.getInstance().getPropertyAsBoolean(ConfigKeys.OMIT_WARMUP, false)) {
					warmUpSUT();
				}
				executeExperiments();
			}
			Spotter.getInstance().getProgress()
					.updateProgressStatus(getProvider().getName(), DiagnosisStatus.ANALYSING);
			return analyze(loadData());
		} catch (Exception e) {
			if (e instanceof InstrumentationException) {
				throw (InstrumentationException) e;
			} else {
				// best-effort clean-up: remove instrumentation before reporting failure
				if (instrumented) {
					instrumentationController.uninstrument();
				}
				instrumented = false;
				String message = "Error during problem analysis by " + this.getClass().getSimpleName()
						+ ". Ignoring and resuming!";
				LOGGER.warn(message + " Cause: {}", e);
				SpotterResult result = new SpotterResult();
				result.addMessage(message);
				result.setDetected(false);
				return result;
			}
		}
	}
/**
* Updates the current progress of this controller.
*/
public void updateEstimatedProgress() {
long elapsedTime = (System.currentTimeMillis() - GlobalConfiguration.getInstance().getPropertyAsLong(
ConfigKeys.PPD_RUN_TIMESTAMP, 0L))
/ KILO;
long currentEstimatedOverallDuration = getEstimatedOverallDuration();
// as the estimated overall duration might not have been calculated yet
// and return default
// value 0, it must be checked to be greater 0
if (currentEstimatedOverallDuration > 0) {
Spotter.getInstance()
.getProgress()
.updateProgress(getProvider().getName(), (double) (elapsedTime / getEstimatedOverallDuration()),
getEstimatedOverallDuration() - elapsedTime);
}
}
	/**
	 * This method triggers the load generators to put low load on the system
	 * under test in order to warm it up. E.g. all required classes of the SUT
	 * should be loaded.
	 *
	 * @throws WorkloadException
	 *             if starting or awaiting the warm-up load fails
	 */
	private void warmUpSUT() throws WorkloadException {
		// warm up only once per controller instance
		if (!sutWarmedUp) {
			Spotter.getInstance().getProgress().updateProgressStatus(getProvider().getName(), DiagnosisStatus.WARM_UP)
			// single user with minimal ramp-up / cool-down phases
			Properties wlProperties = new Properties();
			wlProperties.setProperty(IWorkloadAdapter.NUMBER_CURRENT_USERS, String.valueOf(1));
			wlProperties.setProperty(ConfigKeys.EXPERIMENT_RAMP_UP_INTERVAL_LENGTH, String.valueOf(1));
			wlProperties.setProperty(ConfigKeys.EXPERIMENT_RAMP_UP_NUM_USERS_PER_INTERVAL, String.valueOf(1));
			wlProperties.setProperty(ConfigKeys.EXPERIMENT_COOL_DOWN_INTERVAL_LENGTH, String.valueOf(1));
			wlProperties.setProperty(ConfigKeys.EXPERIMENT_COOL_DOWN_NUM_USERS_PER_INTERVAL, String.valueOf(1));
			wlProperties.setProperty(ConfigKeys.EXPERIMENT_DURATION, String.valueOf(SUT_WARMPUP_DURATION));
			workloadAdapter.startLoad(wlProperties);
			workloadAdapter.waitForFinishedLoad();
			sutWarmedUp = true;
		}
	}
private void initialize() throws MeasurementException, InstrumentationException, WorkloadException {
Spotter.getInstance().getProgress()
.updateProgressMessage(getProvider().getName(), "Initializing measurement environment...");
long startInitialization = System.currentTimeMillis();
initInstrumentationController();
initMeasurementController();
initWorkloadAdapter();
additionalDuration += (System.currentTimeMillis() - startInitialization) / KILO;
}
private void initWorkloadAdapter() throws WorkloadException {
String measurementEnvironmentFile = GlobalConfiguration.getInstance().getProperty(
ConfigKeys.MEASUREMENT_ENVIRONMENT_FILE);
if (measurementEnvironmentFile == null) {
throw new WorkloadException("Measurement Environment File has not been specified!");
}
List<IWorkloadAdapter> wlAdapters = MeasurementEnvironmentFactory.getInstance().createWorkloadAdapters(
measurementEnvironmentFile);
workloadAdapter = WorkloadAdapterBroker.getInstance();
((WorkloadAdapterBroker) workloadAdapter).setControllers(wlAdapters);
workloadAdapter.initialize();
}
private void initMeasurementController() throws InstrumentationException, MeasurementException {
String measurementEnvironmentFile = GlobalConfiguration.getInstance().getProperty(
ConfigKeys.MEASUREMENT_ENVIRONMENT_FILE);
if (measurementEnvironmentFile == null) {
throw new InstrumentationException("Measurement Environment File has not been specified!");
}
List<IMeasurementController> controllers = MeasurementEnvironmentFactory.getInstance()
.createMeasurementControllers(measurementEnvironmentFile);
measurementController = MeasurementBroker.getInstance();
((MeasurementBroker) measurementController).setControllers(controllers);
measurementController.initialize();
}
private void initInstrumentationController() throws InstrumentationException {
String measurementEnvironmentFile = GlobalConfiguration.getInstance().getProperty(
ConfigKeys.MEASUREMENT_ENVIRONMENT_FILE);
if (measurementEnvironmentFile == null) {
throw new InstrumentationException("Measurement Environment File has not been specified!");
}
List<ISpotterInstrumentation> instrumentations = MeasurementEnvironmentFactory.getInstance()
.createInstrumentationControllers(measurementEnvironmentFile);
instrumentationController = InstrumentationBroker.getInstance();
((InstrumentationBroker) instrumentationController).setControllers(instrumentations);
instrumentationController.initialize();
}
	/**
	 * Runs the default experiment series: a single run with the maximum number
	 * of users, or a series scaling the user count from the minimum up to the
	 * configured maximum in equidistant steps. The SUT is instrumented before
	 * and uninstrumented after the series.
	 *
	 * @param detectionControllerClass
	 *            class of the detection controller (used for logging)
	 * @param numExperimentSteps
	 *            number of experiment runs in the series
	 * @param instDescription
	 *            instrumentation description to apply for the series
	 * @throws InstrumentationException
	 *             if (un-)instrumentation fails
	 * @throws MeasurementException
	 *             if collecting measurement data fails
	 * @throws WorkloadException
	 *             if load generation fails
	 */
	protected void executeDefaultExperimentSeries(Class<? extends IDetectionController> detectionControllerClass,
			int numExperimentSteps, InstrumentationDescription instDescription) throws InstrumentationException,
			MeasurementException, WorkloadException {
		instrumentApplication(instDescription);
		int maxUsers = Integer.parseInt(LpeStringUtils.getPropertyOrFail(GlobalConfiguration.getInstance()
				.getProperties(), ConfigKeys.WORKLOAD_MAXUSERS, null));
		if (numExperimentSteps <= 1) {
			runExperiment(detectionControllerClass, maxUsers);
		} else {
			improveEstimatedDuration(numExperimentSteps);
			double dMinUsers = MIN_NUM_USERS;
			double dMaxUsers = maxUsers;
			double dStep = (dMaxUsers - dMinUsers) / (double) (numExperimentSteps - 1);
			// if we have the same number of maximum and minimum users, then we
			// have only one experiment run
			if (dStep <= 0.0 + 0.0001) {
				runExperiment(detectionControllerClass, MIN_NUM_USERS);
			} else {
				// EPSELON compensates floating-point rounding so the final step
				// with the maximum user count is not skipped
				for (double dUsers = dMinUsers; dUsers <= (dMaxUsers + EPSELON); dUsers += dStep) {
					int numUsers = new Double(dUsers).intValue();
					runExperiment(detectionControllerClass, numUsers);
				}
			}
		}
		uninstrumentApplication();
	}
	/**
	 * Instruments the system under test with the given description merged with
	 * the descriptions of all registered experiment reusers. The time spent is
	 * accounted to the additional duration.
	 *
	 * @param instDescription
	 *            instrumentation description of this detection controller
	 * @throws InstrumentationException
	 *             if instrumentation fails
	 */
	protected void instrumentApplication(InstrumentationDescription instDescription) throws InstrumentationException {
		Spotter.getInstance().getProgress()
				.updateProgressStatus(getProvider().getName(), DiagnosisStatus.INSTRUMENTING);
		long instrumentationStart = System.currentTimeMillis();
		// merge own description with the descriptions of all reusers
		InstrumentationDescriptionBuilder descriptionBuilder = new InstrumentationDescriptionBuilder();
		descriptionBuilder.appendOtherDescription(instDescription);
		for (IExperimentReuser reuser : experimentReuser) {
			descriptionBuilder.appendOtherDescription(reuser.getInstrumentationDescription());
		}
		instrumentationController.instrument(descriptionBuilder.build());
		// remember the state for clean-up in case of a later failure
		instrumented = true;
		additionalDuration += (System.currentTimeMillis() - instrumentationStart) / KILO;
	}
protected void uninstrumentApplication() throws InstrumentationException {
Spotter.getInstance().getProgress()
.updateProgressStatus(getProvider().getName(), DiagnosisStatus.UNINSTRUMENTING);
long uninstrumentationStart = System.currentTimeMillis();
instrumentationController.uninstrument();
instrumented = false;
additionalDuration += (System.currentTimeMillis() - uninstrumentationStart) / KILO;
}
	/**
	 * Runs a single load experiment with the given number of users: ramp-up,
	 * stable (monitored) phase, cool-down, followed by data collection. The
	 * data collection time is accounted to the additional duration.
	 *
	 * @param detectionControllerClass
	 *            class of the detection controller (used for logging)
	 * @param numUsers
	 *            number of concurrent users to simulate
	 * @throws WorkloadException
	 *             if load generation fails
	 * @throws MeasurementException
	 *             if enabling/disabling monitoring or storing data fails
	 */
	protected void runExperiment(Class<? extends IDetectionController> detectionControllerClass, int numUsers)
			throws WorkloadException, MeasurementException {
		LOGGER.info("{} started experiment with {} users ...", detectionControllerClass.getSimpleName(), numUsers);
		Properties wlProperties = new Properties();
		wlProperties.setProperty(IWorkloadAdapter.NUMBER_CURRENT_USERS, String.valueOf(numUsers));
		// ramp-up phase: start load and wait until all users are active
		Spotter.getInstance().getProgress()
				.updateProgressStatus(getProvider().getName(), DiagnosisStatus.EXPERIMENTING_RAMP_UP);
		workloadAdapter.startLoad(wlProperties);
		workloadAdapter.waitForWarmupPhaseTermination();
		// stable phase: monitoring is enabled only during this window
		Spotter.getInstance().getProgress()
				.updateProgressStatus(getProvider().getName(), DiagnosisStatus.EXPERIMENTING_STABLE_PHASE);
		measurementController.enableMonitoring();
		workloadAdapter.waitForExperimentPhaseTermination();
		// cool-down phase: stop monitoring, let the load drain
		Spotter.getInstance().getProgress()
				.updateProgressStatus(getProvider().getName(), DiagnosisStatus.EXPERIMENTING_COOL_DOWN);
		measurementController.disableMonitoring();
		workloadAdapter.waitForFinishedLoad();
		Spotter.getInstance().getProgress()
				.updateProgressStatus(getProvider().getName(), DiagnosisStatus.COLLECTING_DATA);
		LOGGER.info("Storing data ...");
		long dataCollectionStart = System.currentTimeMillis();
		// tag the stored datasets with the user count of this run
		Parameter numOfUsersParameter = new Parameter(NUMBER_OF_USERS, numUsers);
		Set<Parameter> parameters = new TreeSet<>();
		parameters.add(numOfUsersParameter);
		storeResults(parameters);
		additionalDuration += (System.currentTimeMillis() - dataCollectionStart) / KILO;
		LOGGER.info("Data stored!");
	}
	/**
	 * Overrides the directory from which measurement data is loaded (used when
	 * experiments are omitted or parent data is reused).
	 *
	 * @param dataDirectory
	 *            new data directory
	 */
	protected void overwriteDataPath(String dataDirectory) {
		dataPath = dataDirectory;
	}
protected void storeImageChartResource(Chart chart, String fileName, SpotterResult spotterResult) {
String resourceName = fileName + ".png";
String filePath = getAdditionalResourcesPath() + resourceName;
try {
BitmapEncoder.savePNGWithDPI(chart, filePath, DPI);
} catch (IOException e) {
// just ignore
return;
}
spotterResult.addResourceFile(resourceName);
}
	/**
	 * Asynchronously copies the given input stream to a text file in the
	 * additional resources directory and registers it with the given result.
	 *
	 * NOTE(review): unlike storeImageChartResource (which registers only the
	 * resource name), this registers the full file path with the result -
	 * verify which form the result consumers expect.
	 *
	 * NOTE(review): IOExceptions are rethrown as RuntimeException inside the
	 * spawned thread, which terminates that thread without notifying the
	 * caller - consider logging instead.
	 *
	 * @param fileName
	 *            file name without extension
	 * @param spotterResult
	 *            result the resource is attached to
	 * @param inStream
	 *            stream supplying the text content; fully consumed by the
	 *            background thread
	 */
	protected void storeTextResource(final String fileName, final SpotterResult spotterResult,
			final InputStream inStream) {
		new Thread(new Runnable() {
			@Override
			public void run() {
				String filePath = getAdditionalResourcesPath() + fileName + ".txt";
				BufferedWriter bWriter = null;
				BufferedReader bReader = null;
				try {
					FileWriter fWriter = new FileWriter(filePath);
					bWriter = new BufferedWriter(fWriter);
					bReader = new BufferedReader(new InputStreamReader(inStream));
					// copy the stream line by line until EOF
					String line = bReader.readLine();
					while (line != null) {
						bWriter.write(line);
						bWriter.newLine();
						line = bReader.readLine();
					}
				} catch (IOException e) {
					throw new RuntimeException(e);
				} finally {
					try {
						if (bWriter != null) {
							bWriter.close();
						}
						if (bReader != null) {
							bReader.close();
						}
					} catch (IOException e) {
						throw new RuntimeException(e);
					}
				}
				spotterResult.addResourceFile(filePath);
			}
		}).start();
	}
protected void storeResults(Set<Parameter> parameters) throws MeasurementException {
try {
experimentCount++;
final String path = getExperimentPath(experimentCount);
final PipedOutputStream outStream = new PipedOutputStream();
final PipedInputStream inStream = new PipedInputStream(outStream);
new Thread(new Runnable() {
@Override
public void run() {
try {
measurementController.pipeToOutputStream(outStream);
} catch (MeasurementException e) {
throw new RuntimeException("Failed Storing data!");
}
}
}).start();
RecordCSVWriter.getInstance().pipeDataToDatasetFiles(inStream, path, parameters);
measurementController.storeReport(path);
} catch (IOException e) {
throw new RuntimeException("Failed Storing data!");
}
}
private String getExperimentPath(int experimentCount) {
StringBuilder pathBuilder = new StringBuilder(getDataPath());
pathBuilder.append(String.valueOf(experimentCount));
pathBuilder.append(System.getProperty("file.separator"));
return pathBuilder.toString();
}
@Override
public String getDataPath() {
StringBuilder pathBuilder = new StringBuilder();
if (dataPath == null) {
pathBuilder.append(GlobalConfiguration.getInstance().getProperty(ConfigKeys.RESULT_DIR));
pathBuilder.append(getProvider().getName());
pathBuilder.append(System.getProperty("file.separator"));
pathBuilder.append(ResultsLocationConstants.CSV_SUB_DIR);
pathBuilder.append(System.getProperty("file.separator"));
dataPath = pathBuilder.toString();
} else {
pathBuilder.append(dataPath);
}
return pathBuilder.toString();
}
protected String getAdditionalResourcesPath() {
StringBuilder pathBuilder = new StringBuilder();
if (resourcePath == null) {
pathBuilder.append(GlobalConfiguration.getInstance().getProperty(ConfigKeys.RESULT_DIR));
pathBuilder.append(getProvider().getName());
pathBuilder.append(System.getProperty("file.separator"));
pathBuilder.append(ResultsLocationConstants.RESULT_RESOURCES_SUB_DIR);
pathBuilder.append(System.getProperty("file.separator"));
resourcePath = pathBuilder.toString();
File file = new File(resourcePath);
if (!file.exists()) {
LpeFileUtils.createDir(resourcePath);
}
} else {
pathBuilder.append(resourcePath);
}
return pathBuilder.toString();
}
protected DatasetCollection loadData() {
return RecordCSVReader.getInstance().readDatasetCollectionFromDirectory(dataPath);
}
	/**
	 * Executes the experiment series required by this detection heuristic.
	 *
	 * @throws InstrumentationException
	 *             if (un-)instrumentation fails
	 * @throws MeasurementException
	 *             if collecting measurement data fails
	 * @throws WorkloadException
	 *             if load generation fails
	 */
	protected abstract void executeExperiments() throws InstrumentationException, MeasurementException,
			WorkloadException;
	/**
	 * @return the number of experiments this heuristic is going to run
	 */
	protected abstract int getNumOfExperiments();
	/**
	 * Analyzes the given measurement data for the performance problem.
	 *
	 * @param data
	 *            measurement data of all experiment runs
	 * @return the detection result
	 */
	protected abstract SpotterResult analyze(DatasetCollection data);
	/**
	 * Initial rough estimate: assumes every experiment runs with the maximum
	 * number of users (refined later by improveEstimatedDuration).
	 */
	private void calculateInitialEstimatedDuration() {
		int numExperiments = getNumOfExperiments();
		long numUsers = GlobalConfiguration.getInstance().getPropertyAsLong(ConfigKeys.WORKLOAD_MAXUSERS, 1L);
		estimatedDuration = calculateExperimentDuration(numUsers) * numExperiments;
	}
	/**
	 * Estimates the duration [s] of a single experiment run with the given
	 * number of users: ramp-up + stable phase + cool-down, based on the
	 * configured interval lengths and users per interval.
	 *
	 * @param numUsers
	 *            number of users of the experiment run
	 * @return estimated duration in seconds
	 */
	private long calculateExperimentDuration(long numUsers) {
		long rampUpUsersPerInterval = GlobalConfiguration.getInstance().getPropertyAsLong(
				ConfigKeys.EXPERIMENT_RAMP_UP_NUM_USERS_PER_INTERVAL, 0L);
		long coolDownUsersPerInterval = GlobalConfiguration.getInstance().getPropertyAsLong(
				ConfigKeys.EXPERIMENT_COOL_DOWN_NUM_USERS_PER_INTERVAL, 0L);
		long rampUpInterval = GlobalConfiguration.getInstance().getPropertyAsLong(
				ConfigKeys.EXPERIMENT_RAMP_UP_INTERVAL_LENGTH, 0L);
		long coolDownInterval = GlobalConfiguration.getInstance().getPropertyAsLong(
				ConfigKeys.EXPERIMENT_COOL_DOWN_INTERVAL_LENGTH, 0L);
		long stablePhase = GlobalConfiguration.getInstance().getPropertyAsLong(ConfigKeys.EXPERIMENT_DURATION, 0L);
		// guard against division by zero when ramp-up/cool-down is not configured
		long rampUp = 0;
		if (rampUpUsersPerInterval != 0) {
			rampUp = (numUsers / rampUpUsersPerInterval) * rampUpInterval;
		}
		long coolDown = 0;
		if (coolDownUsersPerInterval != 0) {
			coolDown = (numUsers / coolDownUsersPerInterval) * coolDownInterval;
		}
		return rampUp + stablePhase + coolDown;
	}
	/**
	 * Refines the initial duration estimate for an experiment series by
	 * summing the estimated duration of each individual step with its actual
	 * user count.
	 *
	 * NOTE(review): unlike the loop in executeDefaultExperimentSeries, this
	 * loop bound has no EPSELON tolerance, so floating-point rounding may omit
	 * the final (maximum users) step from the estimate - verify intended.
	 *
	 * @param numExperimentSteps
	 *            number of experiment runs in the series (must be &gt; 1)
	 */
	private void improveEstimatedDuration(int numExperimentSteps) {
		double dMinUsers = MIN_NUM_USERS;
		long maxUsers = GlobalConfiguration.getInstance().getPropertyAsLong(ConfigKeys.WORKLOAD_MAXUSERS, 1L);
		double dMaxUsers = maxUsers;
		double dStep = (dMaxUsers - dMinUsers) / (double) (numExperimentSteps - 1);
		long duration = 0L;
		// a non-positive step means min and max user counts coincide: one run
		if (dStep <= 0.0 + 0.0001) {
			duration += calculateExperimentDuration(new Double(dMinUsers).intValue());
		} else {
			for (double dUsers = dMinUsers; dUsers <= dMaxUsers; dUsers += dStep) {
				int numUsers = new Double(dUsers).intValue();
				duration += calculateExperimentDuration(numUsers);
			}
		}
		estimatedDuration = duration;
	}
	/**
	 * @return the problem detection configuration of this controller
	 */
	public Properties getProblemDetectionConfiguration() {
		return problemDetectionConfiguration;
	}
	/**
	 * Sets the configuration for problem detection.
	 *
	 * @param problemDetectionConfiguration
	 *            the new properties
	 */
	public void setProblemDetectionConfiguration(Properties problemDetectionConfiguration) {
		this.problemDetectionConfiguration = problemDetectionConfiguration;
	}
	@Override
	public void addExperimentReuser(IExperimentReuser reuser) {
		// reusers contribute their instrumentation descriptions to this
		// controller's experiments (see instrumentApplication)
		experimentReuser.add(reuser);
	}
	@Override
	public void setParentDataDir(String readDataFrom) {
		// used when this controller reuses the experiment data of its parent
		this.parentDataDir = readDataFrom;
	}
	/**
	 * @return the estimated overall duration [s]: estimated experiment time
	 *         plus the bookkeeping overhead accumulated so far
	 */
	public long getEstimatedOverallDuration() {
		return estimatedDuration + additionalDuration;
	}
}
| org.spotter.core/src/org/spotter/core/detection/AbstractDetectionController.java | /**
* Copyright 2014 SAP AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spotter.core.detection;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import org.aim.api.exceptions.InstrumentationException;
import org.aim.api.exceptions.MeasurementException;
import org.aim.api.instrumentation.description.InstrumentationDescription;
import org.aim.api.instrumentation.description.InstrumentationDescriptionBuilder;
import org.aim.api.measurement.dataset.DatasetCollection;
import org.aim.api.measurement.dataset.Parameter;
import org.aim.api.measurement.utils.RecordCSVReader;
import org.aim.api.measurement.utils.RecordCSVWriter;
import org.lpe.common.config.GlobalConfiguration;
import org.lpe.common.extension.AbstractExtensionArtifact;
import org.lpe.common.extension.IExtension;
import org.lpe.common.util.LpeFileUtils;
import org.lpe.common.util.LpeStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spotter.core.Spotter;
import org.spotter.core.config.interpretation.MeasurementEnvironmentFactory;
import org.spotter.core.instrumentation.ISpotterInstrumentation;
import org.spotter.core.instrumentation.InstrumentationBroker;
import org.spotter.core.measurement.IMeasurementController;
import org.spotter.core.measurement.MeasurementBroker;
import org.spotter.core.workload.IWorkloadAdapter;
import org.spotter.core.workload.WorkloadAdapterBroker;
import org.spotter.exceptions.WorkloadException;
import org.spotter.shared.configuration.ConfigKeys;
import org.spotter.shared.result.ResultsLocationConstants;
import org.spotter.shared.result.model.SpotterResult;
import org.spotter.shared.status.DiagnosisStatus;
import com.xeiam.xchart.BitmapEncoder;
import com.xeiam.xchart.Chart;
/**
* The {@link AbstractDetectionController} comprises common functionality of all
* detection controller classes, like initialization, result persistance, etc.
*
* @author Alexander Wert
*
*/
public abstract class AbstractDetectionController extends AbstractExtensionArtifact implements IDetectionController {
private static final double EPSELON = 0.5;
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDetectionController.class);
/**
* property key for detection name.
*/
public static final long KILO = 1000L;
public static final String DETECTABLE_KEY = "org.spotter.detection.detectable";
private static final int SUT_WARMPUP_DURATION = GlobalConfiguration.getInstance().getPropertyAsInteger(ConfigKeys.PREWARUMUP_DURATION, 180);
private static final int MIN_NUM_USERS = 1;
protected static final String NUMBER_OF_USERS = "numUsers";
protected static final String EXPERIMENT_STEPS_KEY = "numExperimentSteps";
private static final int DPI = 300;
private ISpotterInstrumentation instrumentationController;
protected IMeasurementController measurementController;
protected IWorkloadAdapter workloadAdapter;
protected boolean instrumented = false;
private boolean sutWarmedUp = false;
private Properties problemDetectionConfiguration = new Properties();
private int experimentCount = 0;
private String dataPath;
private String resourcePath;
private List<IExperimentReuser> experimentReuser;
private String parentDataDir;
protected long measurementRampUpTime;
private long estimatedDuration = 0L;
protected long additionalDuration = 0L;
/**
* Constructor.
*
* @param provider
* Provider of the extension.
*/
public AbstractDetectionController(IExtension<IDetectionController> provider) {
super(provider);
experimentReuser = new ArrayList<>();
}
@Override
public SpotterResult analyzeProblem() throws InstrumentationException, MeasurementException, WorkloadException {
try {
if (!GlobalConfiguration.getInstance().getPropertyAsBoolean(ConfigKeys.OMIT_WARMUP, false)) {
additionalDuration += SUT_WARMPUP_DURATION;
}
calculateInitialEstimatedDuration();
Spotter.getInstance().getProgress()
.updateProgressStatus(getProvider().getName(), DiagnosisStatus.INITIALIZING);
if (GlobalConfiguration.getInstance().getPropertyAsBoolean(ConfigKeys.OMIT_EXPERIMENTS, false)) {
overwriteDataPath(GlobalConfiguration.getInstance().getProperty(ConfigKeys.DUMMY_EXPERIMENT_DATA));
} else if (this instanceof IExperimentReuser) {
overwriteDataPath(parentDataDir);
} else {
initialize();
if (!GlobalConfiguration.getInstance().getPropertyAsBoolean(ConfigKeys.OMIT_WARMUP, false)) {
warmUpSUT();
}
executeExperiments();
}
Spotter.getInstance().getProgress()
.updateProgressStatus(getProvider().getName(), DiagnosisStatus.ANALYSING);
return analyze(loadData());
} catch (Exception e) {
if (e instanceof InstrumentationException) {
throw (InstrumentationException) e;
} else {
if (instrumented) {
instrumentationController.uninstrument();
}
instrumented = false;
String message = "Error during problem analysis by " + this.getClass().getSimpleName()
+ ". Ignoring and resuming!";
LOGGER.warn(message + " Cause: {}", e);
SpotterResult result = new SpotterResult();
result.addMessage(message);
result.setDetected(false);
return result;
}
}
}
/**
* Updates the current progress of this controller.
*/
public void updateEstimatedProgress() {
long elapsedTime = (System.currentTimeMillis() - GlobalConfiguration.getInstance().getPropertyAsLong(
ConfigKeys.PPD_RUN_TIMESTAMP, 0L))
/ KILO;
long currentEstimatedOverallDuration = getEstimatedOverallDuration();
// as the estimated overall duration might not have been calculated yet
// and return default
// value 0, it must be checked to be greater 0
if (currentEstimatedOverallDuration > 0) {
Spotter.getInstance()
.getProgress()
.updateProgress(getProvider().getName(), (double) (elapsedTime / getEstimatedOverallDuration()),
getEstimatedOverallDuration() - elapsedTime);
}
}
/**
* This method triggers the load generators to put low load on the system
* under test in order to warm it up. E.g. all required classes of the SUT
* should be loaded.
*
* @throws WorkloadException
*/
private void warmUpSUT() throws WorkloadException {
if (!sutWarmedUp) {
Spotter.getInstance().getProgress().updateProgressStatus(getProvider().getName(), DiagnosisStatus.WARM_UP);
Properties wlProperties = new Properties();
wlProperties.setProperty(IWorkloadAdapter.NUMBER_CURRENT_USERS, String.valueOf(1));
wlProperties.setProperty(ConfigKeys.EXPERIMENT_RAMP_UP_INTERVAL_LENGTH, String.valueOf(1));
wlProperties.setProperty(ConfigKeys.EXPERIMENT_RAMP_UP_NUM_USERS_PER_INTERVAL, String.valueOf(1));
wlProperties.setProperty(ConfigKeys.EXPERIMENT_COOL_DOWN_INTERVAL_LENGTH, String.valueOf(1));
wlProperties.setProperty(ConfigKeys.EXPERIMENT_COOL_DOWN_NUM_USERS_PER_INTERVAL, String.valueOf(1));
wlProperties.setProperty(ConfigKeys.EXPERIMENT_DURATION, String.valueOf(SUT_WARMPUP_DURATION));
workloadAdapter.startLoad(wlProperties);
workloadAdapter.waitForFinishedLoad();
sutWarmedUp = true;
}
}
private void initialize() throws MeasurementException, InstrumentationException, WorkloadException {
Spotter.getInstance().getProgress()
.updateProgressMessage(getProvider().getName(), "Initializing measurement environment...");
long startInitialization = System.currentTimeMillis();
initInstrumentationController();
initMeasurementController();
initWorkloadAdapter();
additionalDuration += (System.currentTimeMillis() - startInitialization) / KILO;
}
private void initWorkloadAdapter() throws WorkloadException {
String measurementEnvironmentFile = GlobalConfiguration.getInstance().getProperty(
ConfigKeys.MEASUREMENT_ENVIRONMENT_FILE);
if (measurementEnvironmentFile == null) {
throw new WorkloadException("Measurement Environment File has not been specified!");
}
List<IWorkloadAdapter> wlAdapters = MeasurementEnvironmentFactory.getInstance().createWorkloadAdapters(
measurementEnvironmentFile);
workloadAdapter = WorkloadAdapterBroker.getInstance();
((WorkloadAdapterBroker) workloadAdapter).setControllers(wlAdapters);
workloadAdapter.initialize();
}
private void initMeasurementController() throws InstrumentationException, MeasurementException {
String measurementEnvironmentFile = GlobalConfiguration.getInstance().getProperty(
ConfigKeys.MEASUREMENT_ENVIRONMENT_FILE);
if (measurementEnvironmentFile == null) {
throw new InstrumentationException("Measurement Environment File has not been specified!");
}
List<IMeasurementController> controllers = MeasurementEnvironmentFactory.getInstance()
.createMeasurementControllers(measurementEnvironmentFile);
measurementController = MeasurementBroker.getInstance();
((MeasurementBroker) measurementController).setControllers(controllers);
measurementController.initialize();
}
private void initInstrumentationController() throws InstrumentationException {
String measurementEnvironmentFile = GlobalConfiguration.getInstance().getProperty(
ConfigKeys.MEASUREMENT_ENVIRONMENT_FILE);
if (measurementEnvironmentFile == null) {
throw new InstrumentationException("Measurement Environment File has not been specified!");
}
List<ISpotterInstrumentation> instrumentations = MeasurementEnvironmentFactory.getInstance()
.createInstrumentationControllers(measurementEnvironmentFile);
instrumentationController = InstrumentationBroker.getInstance();
((InstrumentationBroker) instrumentationController).setControllers(instrumentations);
instrumentationController.initialize();
}
protected void executeDefaultExperimentSeries(Class<? extends IDetectionController> detectionControllerClass,
int numExperimentSteps, InstrumentationDescription instDescription) throws InstrumentationException,
MeasurementException, WorkloadException {
instrumentApplication(instDescription);
int maxUsers = Integer.parseInt(LpeStringUtils.getPropertyOrFail(GlobalConfiguration.getInstance()
.getProperties(), ConfigKeys.WORKLOAD_MAXUSERS, null));
if (numExperimentSteps <= 1) {
runExperiment(detectionControllerClass, maxUsers);
} else {
improveEstimatedDuration(numExperimentSteps);
double dMinUsers = MIN_NUM_USERS;
double dMaxUsers = maxUsers;
double dStep = (dMaxUsers - dMinUsers) / (double) (numExperimentSteps - 1);
for (double dUsers = dMinUsers; dUsers <= (dMaxUsers + EPSELON); dUsers += dStep) {
int numUsers = new Double(dUsers).intValue();
runExperiment(detectionControllerClass, numUsers);
}
}
uninstrumentApplication();
}
protected void instrumentApplication(InstrumentationDescription instDescription) throws InstrumentationException {
Spotter.getInstance().getProgress()
.updateProgressStatus(getProvider().getName(), DiagnosisStatus.INSTRUMENTING);
long instrumentationStart = System.currentTimeMillis();
InstrumentationDescriptionBuilder descriptionBuilder = new InstrumentationDescriptionBuilder();
descriptionBuilder.appendOtherDescription(instDescription);
for (IExperimentReuser reuser : experimentReuser) {
descriptionBuilder.appendOtherDescription(reuser.getInstrumentationDescription());
}
instrumentationController.instrument(descriptionBuilder.build());
instrumented = true;
additionalDuration += (System.currentTimeMillis() - instrumentationStart) / KILO;
}
protected void uninstrumentApplication() throws InstrumentationException {
Spotter.getInstance().getProgress()
.updateProgressStatus(getProvider().getName(), DiagnosisStatus.UNINSTRUMENTING);
long uninstrumentationStart = System.currentTimeMillis();
instrumentationController.uninstrument();
instrumented = false;
additionalDuration += (System.currentTimeMillis() - uninstrumentationStart) / KILO;
}
/**
 * Executes a single load experiment with the given number of users: ramps the
 * workload up, records monitoring data during the stable phase only, ramps down,
 * and persists the collected data tagged with the user count.
 *
 * @param detectionControllerClass detection controller owning the experiment (logging only)
 * @param numUsers number of concurrent users to generate
 * @throws WorkloadException if workload generation fails
 * @throws MeasurementException if measurement data cannot be collected or stored
 */
protected void runExperiment(Class<? extends IDetectionController> detectionControllerClass, int numUsers)
        throws WorkloadException, MeasurementException {
    LOGGER.info("{} started experiment with {} users ...", detectionControllerClass.getSimpleName(), numUsers);

    Properties workloadProps = new Properties();
    workloadProps.setProperty(IWorkloadAdapter.NUMBER_CURRENT_USERS, String.valueOf(numUsers));

    // ramp-up phase
    Spotter.getInstance().getProgress()
            .updateProgressStatus(getProvider().getName(), DiagnosisStatus.EXPERIMENTING_RAMP_UP);
    workloadAdapter.startLoad(workloadProps);
    workloadAdapter.waitForWarmupPhaseTermination();

    // stable phase: monitoring is enabled only for this window
    Spotter.getInstance().getProgress()
            .updateProgressStatus(getProvider().getName(), DiagnosisStatus.EXPERIMENTING_STABLE_PHASE);
    measurementController.enableMonitoring();
    workloadAdapter.waitForExperimentPhaseTermination();

    // cool-down phase
    Spotter.getInstance().getProgress()
            .updateProgressStatus(getProvider().getName(), DiagnosisStatus.EXPERIMENTING_COOL_DOWN);
    measurementController.disableMonitoring();
    workloadAdapter.waitForFinishedLoad();

    // persist the recorded data, tagged with the number of users
    Spotter.getInstance().getProgress()
            .updateProgressStatus(getProvider().getName(), DiagnosisStatus.COLLECTING_DATA);
    LOGGER.info("Storing data ...");
    long storeStart = System.currentTimeMillis();
    Set<Parameter> experimentParameters = new TreeSet<>();
    experimentParameters.add(new Parameter(NUMBER_OF_USERS, numUsers));
    storeResults(experimentParameters);
    // data collection time counts as overhead on top of the estimate
    additionalDuration += (System.currentTimeMillis() - storeStart) / KILO;
    LOGGER.info("Data stored!");
}
/**
 * Overwrites the directory used for experiment raw data. Intended for reusing the
 * data of a previous run instead of the default, lazily derived result path.
 *
 * @param dataDirectory new data directory
 */
protected void overwriteDataPath(String dataDirectory) {
    dataPath = dataDirectory;
}
/**
 * Renders the given chart to a PNG file in the additional-resources directory and
 * registers it with the spotter result. Storing a chart is best effort: on I/O
 * failure the run continues and no resource is registered.
 *
 * @param chart chart to render
 * @param fileName base file name (without extension)
 * @param spotterResult result the resource is attached to
 */
protected void storeImageChartResource(Chart chart, String fileName, SpotterResult spotterResult) {
    String resourceName = fileName + ".png";
    String filePath = getAdditionalResourcesPath() + resourceName;
    try {
        BitmapEncoder.savePNGWithDPI(chart, filePath, DPI);
    } catch (IOException e) {
        // best effort: do not fail the diagnosis run for a missing chart, but leave
        // a trace in the log instead of swallowing the error silently
        LOGGER.warn("Failed to store chart resource {}: {}", filePath, e.getMessage());
        return;
    }
    spotterResult.addResourceFile(resourceName);
}
/**
 * Asynchronously copies the given input stream line by line into a text file in the
 * additional-resources directory and registers the file with the spotter result.
 * The copy runs on a dedicated thread so the (potentially blocking) stream does not
 * stall the caller.
 *
 * @param fileName base file name (without extension)
 * @param spotterResult result the resource is attached to
 * @param inStream stream to persist; consumed and closed by the background thread
 */
protected void storeTextResource(final String fileName, final SpotterResult spotterResult,
        final InputStream inStream) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            String filePath = getAdditionalResourcesPath() + fileName + ".txt";
            // try-with-resources closes writer and reader even on failure,
            // replacing the previous manual finally block
            try (BufferedWriter bWriter = new BufferedWriter(new FileWriter(filePath));
                    BufferedReader bReader = new BufferedReader(new InputStreamReader(inStream))) {
                String line = bReader.readLine();
                while (line != null) {
                    bWriter.write(line);
                    bWriter.newLine();
                    line = bReader.readLine();
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            // NOTE(review): unlike storeImageChartResource this registers the full
            // path rather than the bare resource name - confirm this is intended
            spotterResult.addResourceFile(filePath);
        }
    }).start();
}
/**
 * Stores the measurement data of the current experiment. Increments the experiment
 * counter, pipes the measurement records through a piped stream pair (producer on a
 * background thread, CSV writer on the calling thread) into the per-experiment data
 * directory, and stores the measurement report alongside it.
 *
 * @param parameters experiment parameters (e.g. number of users) attached to each dataset
 * @throws MeasurementException if retrieving the measurement data fails
 */
protected void storeResults(Set<Parameter> parameters) throws MeasurementException {
    try {
        experimentCount++;
        final String path = getExperimentPath(experimentCount);
        final PipedOutputStream outStream = new PipedOutputStream();
        final PipedInputStream inStream = new PipedInputStream(outStream);

        // producer side: the measurement controller writes records into the pipe
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    measurementController.pipeToOutputStream(outStream);
                } catch (MeasurementException e) {
                    // keep the cause so the failure is diagnosable
                    throw new RuntimeException("Failed Storing data!", e);
                }
            }
        }).start();

        // consumer side: split the piped records into per-dataset CSV files
        RecordCSVWriter.getInstance().pipeDataToDatasetFiles(inStream, path, parameters);
        measurementController.storeReport(path);
    } catch (IOException e) {
        // keep the cause so the failure is diagnosable
        throw new RuntimeException("Failed Storing data!", e);
    }
}
/**
 * Builds the data directory path for the given experiment number: the controller's
 * data path followed by the experiment count and a trailing file separator.
 *
 * @param experimentCount 1-based experiment number
 * @return path of the experiment's data directory
 */
private String getExperimentPath(int experimentCount) {
    return getDataPath() + experimentCount + System.getProperty("file.separator");
}
/**
 * Returns the directory containing this controller's CSV experiment data. The path
 * is derived lazily from the configured result directory, the provider name and the
 * CSV sub-directory, and cached in {@code dataPath} on first use (unless it was set
 * beforehand via {@link #overwriteDataPath(String)}).
 *
 * @return data directory path, ending with a file separator
 */
@Override
public String getDataPath() {
    if (dataPath == null) {
        dataPath = GlobalConfiguration.getInstance().getProperty(ConfigKeys.RESULT_DIR)
                + getProvider().getName()
                + System.getProperty("file.separator")
                + ResultsLocationConstants.CSV_SUB_DIR
                + System.getProperty("file.separator");
    }
    return dataPath;
}
/**
 * Returns the directory for additional result resources (charts, text dumps). The
 * path is derived lazily from the configured result directory, the provider name
 * and the resources sub-directory, cached in {@code resourcePath}, and the
 * directory is created on first use if it does not exist yet.
 *
 * @return resources directory path, ending with a file separator
 */
protected String getAdditionalResourcesPath() {
    if (resourcePath == null) {
        resourcePath = GlobalConfiguration.getInstance().getProperty(ConfigKeys.RESULT_DIR)
                + getProvider().getName()
                + System.getProperty("file.separator")
                + ResultsLocationConstants.RESULT_RESOURCES_SUB_DIR
                + System.getProperty("file.separator");
        File dir = new File(resourcePath);
        if (!dir.exists()) {
            LpeFileUtils.createDir(resourcePath);
        }
    }
    return resourcePath;
}
/**
 * Loads all experiment datasets from this controller's data directory.
 *
 * @return the dataset collection read from the data directory
 */
protected DatasetCollection loadData() {
    // use getDataPath() instead of the raw field so the path is lazily initialized
    // even when loadData() is called before any experiment has run in this JVM
    return RecordCSVReader.getInstance().readDatasetCollectionFromDirectory(getDataPath());
}
/**
 * Executes all experiments required by this detection heuristic.
 *
 * @throws InstrumentationException if (un-)instrumentation fails
 * @throws MeasurementException if collecting or storing measurement data fails
 * @throws WorkloadException if workload generation fails
 */
protected abstract void executeExperiments() throws InstrumentationException, MeasurementException,
        WorkloadException;

/**
 * @return the number of experiments this controller executes; used by
 *         {@code calculateInitialEstimatedDuration()} to estimate the overall duration
 */
protected abstract int getNumOfExperiments();

/**
 * Analyzes the collected experiment data.
 *
 * @param data dataset collection loaded from the experiment data directory
 * @return result of the analysis
 */
protected abstract SpotterResult analyze(DatasetCollection data);
/**
 * Initializes {@code estimatedDuration} with a first-order estimate: the duration
 * of one experiment at the configured maximum number of users, multiplied by the
 * number of experiments.
 */
private void calculateInitialEstimatedDuration() {
    long maxUsers = GlobalConfiguration.getInstance().getPropertyAsLong(ConfigKeys.WORKLOAD_MAXUSERS, 1L);
    estimatedDuration = getNumOfExperiments() * calculateExperimentDuration(maxUsers);
}
/**
 * Calculates the duration of a single experiment with the given number of users:
 * ramp-up time plus stable phase plus cool-down time, each derived from the
 * configured per-interval user counts and interval lengths. Phases whose
 * users-per-interval setting is 0 contribute nothing.
 *
 * @param numUsers number of concurrent users of the experiment
 * @return estimated experiment duration
 */
private long calculateExperimentDuration(long numUsers) {
    GlobalConfiguration config = GlobalConfiguration.getInstance();
    long rampUpPerInterval = config.getPropertyAsLong(
            ConfigKeys.EXPERIMENT_RAMP_UP_NUM_USERS_PER_INTERVAL, 0L);
    long coolDownPerInterval = config.getPropertyAsLong(
            ConfigKeys.EXPERIMENT_COOL_DOWN_NUM_USERS_PER_INTERVAL, 0L);
    long rampUpIntervalLength = config.getPropertyAsLong(
            ConfigKeys.EXPERIMENT_RAMP_UP_INTERVAL_LENGTH, 0L);
    long coolDownIntervalLength = config.getPropertyAsLong(
            ConfigKeys.EXPERIMENT_COOL_DOWN_INTERVAL_LENGTH, 0L);
    long stablePhase = config.getPropertyAsLong(ConfigKeys.EXPERIMENT_DURATION, 0L);

    // guard against division by zero when a phase is not configured
    long rampUp = (rampUpPerInterval == 0) ? 0 : (numUsers / rampUpPerInterval) * rampUpIntervalLength;
    long coolDown = (coolDownPerInterval == 0) ? 0 : (numUsers / coolDownPerInterval) * coolDownIntervalLength;
    return rampUp + stablePhase + coolDown;
}
/**
 * Refines {@code estimatedDuration} once the exact experiment series is known:
 * sums the per-experiment durations over the same user-number steps that
 * {@code executeDefaultExperimentSeries} will actually run.
 *
 * @param numExperimentSteps number of experiment steps of the series (must be > 1 here)
 */
private void improveEstimatedDuration(int numExperimentSteps) {
    double dMinUsers = MIN_NUM_USERS;
    long maxUsers = GlobalConfiguration.getInstance().getPropertyAsLong(ConfigKeys.WORKLOAD_MAXUSERS, 1L);
    double dMaxUsers = maxUsers;
    double dStep = (dMaxUsers - dMinUsers) / (double) (numExperimentSteps - 1);
    long duration = 0L;
    if (dStep <= 0.0 + 0.0001) {
        // min and max user numbers coincide: exactly one experiment is run
        duration += calculateExperimentDuration((int) dMinUsers);
    } else {
        // BUGFIX: use the same EPSELON tolerance as the actual experiment loop in
        // executeDefaultExperimentSeries; without it, floating-point accumulation
        // can make the estimate skip the final (max users) experiment
        for (double dUsers = dMinUsers; dUsers <= (dMaxUsers + EPSELON); dUsers += dStep) {
            duration += calculateExperimentDuration((int) dUsers);
        }
    }
    estimatedDuration = duration;
}
/**
 * @return the configuration properties used for problem detection
 */
public Properties getProblemDetectionConfiguration() {
    return problemDetectionConfiguration;
}

/**
 * Sets the configuration for problem detection.
 *
 * @param problemDetectionConfiguration
 *            the new properties
 */
public void setProblemDetectionConfiguration(Properties problemDetectionConfiguration) {
    this.problemDetectionConfiguration = problemDetectionConfiguration;
}
/**
 * Registers an experiment reuser whose instrumentation description is merged into
 * every instrumentation performed by this controller.
 *
 * @param reuser reuser to register
 */
@Override
public void addExperimentReuser(IExperimentReuser reuser) {
    experimentReuser.add(reuser);
}

/**
 * Sets the parent directory from which previously recorded data is read.
 *
 * @param readDataFrom parent data directory
 */
@Override
public void setParentDataDir(String readDataFrom) {
    this.parentDataDir = readDataFrom;
}
/**
 * @return the estimated overall duration of this controller: the (refined) estimate
 *         of the experiment series plus the measured overhead for instrumentation,
 *         uninstrumentation and data collection
 */
public long getEstimatedOverallDuration() {
    return estimatedDuration + additionalDuration;
}
}
| Fixed a bug in calculating the number of experiments that are executed.
When the step size was 0, infinitely many experiments were executed.
When the maximum and minimum user numbers are the same, only one
experiment is now executed.
Change-Id: I7a2c14e90fc7d36afb046d4bd58ce8850607252d
Signed-off-by: Peter Merkert <[email protected]> | org.spotter.core/src/org/spotter/core/detection/AbstractDetectionController.java | Fixed bug when calculating the amount of experiments which are executed. When the step size was 0, infinite experiments were executed. |
|
Java | apache-2.0 | adad5ec76fe485dcdada7874b01af1396060b76d | 0 | Praveen2112/presto,treasure-data/presto,y-lan/presto,joy-yao/presto,facebook/presto,cberner/presto,aleph-zero/presto,erichwang/presto,harunurhan/presto,fipar/presto,dongjoon-hyun/presto,gh351135612/presto,aramesh117/presto,tomz/presto,troels/nz-presto,propene/presto,wangcan2014/presto,mcanthony/presto,smartpcr/presto,aglne/presto,fipar/presto,geraint0923/presto,dongjoon-hyun/presto,tellproject/presto,mvp/presto,jiangyifangh/presto,yu-yamada/presto,kaschaeffer/presto,avasilevskiy/presto,pwz3n0/presto,kietly/presto,wyukawa/presto,stewartpark/presto,troels/nz-presto,Teradata/presto,twitter-forks/presto,wagnermarkd/presto,toyama0919/presto,deciament/presto,jf367/presto,y-lan/presto,prestodb/presto,facebook/presto,tomz/presto,ajoabraham/presto,mugglmenzel/presto,cawallin/presto,hgschmie/presto,cberner/presto,Yaliang/presto,avasilevskiy/presto,zofuthan/presto,dain/presto,jacobgao/presto,raghavsethi/presto,Praveen2112/presto,miniway/presto,haozhun/presto,electrum/presto,ipros-team/presto,fiedukow/presto,ArturGajowy/presto,takari/presto,raghavsethi/presto,arhimondr/presto,sopel39/presto,haitaoyao/presto,nsabharwal/presto,dabaitu/presto,Yaliang/presto,smartpcr/presto,ebd2/presto,yuananf/presto,shixuan-fan/presto,treasure-data/presto,haozhun/presto,stewartpark/presto,elonazoulay/presto,TeradataCenterForHadoop/bootcamp,Jimexist/presto,kingland/presto,kined/presto,yuananf/presto,hgschmie/presto,shixuan-fan/presto,bloomberg/presto,pwz3n0/presto,DanielTing/presto,ipros-team/presto,aleph-zero/presto,DanielTing/presto,harunurhan/presto,prateek1306/presto,mpilman/presto,chrisunder/presto,aglne/presto,siddhartharay007/presto,nileema/presto,Jimexist/presto,geraint0923/presto,ocono-tech/presto,siddhartharay007/presto,ebd2/presto,smartpcr/presto,dabaitu/presto,raghavsethi/presto,springning/presto,TeradataCenterForHadoop/bootcamp,martint/presto,treasure-data/presto,hulu/presto,yuananf/presto,mcanthony/presto,wrmsr/presto,
totticarter/presto,shubham166/presto,Nasdaq/presto,sunchao/presto,soz-fb/presto,geraint0923/presto,mode/presto,electrum/presto,zjshen/presto,elonazoulay/presto,y-lan/presto,erichwang/presto,bloomberg/presto,siddhartharay007/presto,avasilevskiy/presto,sopel39/presto,miquelruiz/presto,twitter-forks/presto,Yaliang/presto,EvilMcJerkface/presto,XiaominZhang/presto,cosinequanon/presto,xiangel/presto,smartnews/presto,springning/presto,sumitkgec/presto,rockerbox/presto,chrisunder/presto,rockerbox/presto,CHINA-JD/presto,y-lan/presto,Myrthan/presto,zhenyuy-fb/presto,ebd2/presto,tellproject/presto,treasure-data/presto,11xor6/presto,kingland/presto,sunchao/presto,haitaoyao/presto,sumitkgec/presto,mpilman/presto,totticarter/presto,mcanthony/presto,wagnermarkd/presto,miquelruiz/presto,jxiang/presto,mode/presto,EvilMcJerkface/presto,martint/presto,ArturGajowy/presto,elonazoulay/presto,ptkool/presto,11xor6/presto,tomz/presto,jf367/presto,bloomberg/presto,jxiang/presto,ArturGajowy/presto,jacobgao/presto,ebyhr/presto,yuananf/presto,nsabharwal/presto,shubham166/presto,elonazoulay/presto,aleph-zero/presto,hulu/presto,joy-yao/presto,ocono-tech/presto,CHINA-JD/presto,mandusm/presto,gcnonato/presto,XiaominZhang/presto,pwz3n0/presto,sumanth232/presto,dongjoon-hyun/presto,stewartpark/presto,toyama0919/presto,saidalaoui/presto,nakajijiji/presto,svstanev/presto,troels/nz-presto,sumitkgec/presto,tomz/presto,idemura/presto,zhenxiao/presto,twitter-forks/presto,takari/presto,jiangyifangh/presto,zofuthan/presto,haozhun/presto,ebd2/presto,deciament/presto,mandusm/presto,wangcan2014/presto,wyukawa/presto,jekey/presto,mono-plane/presto,bloomberg/presto,EvilMcJerkface/presto,gcnonato/presto,Svjard/presto,zjshen/presto,propene/presto,ajoabraham/presto,idemura/presto,jiekechoo/presto,idemura/presto,sumanth232/presto,prestodb/presto,gh351135612/presto,troels/nz-presto,arhimondr/presto,mugglmenzel/presto,joshk/presto,dain/presto,aglne/presto,zzhao0/presto,yu-yamada/presto,elonazoulay/presto,dain/presto,Na
sdaq/presto,prateek1306/presto,ArturGajowy/presto,Svjard/presto,nileema/presto,tellproject/presto,facebook/presto,kingland/presto,mcanthony/presto,losipiuk/presto,losipiuk/presto,idemura/presto,mattyb149/presto,mode/presto,jekey/presto,zhenyuy-fb/presto,saidalaoui/presto,mode/presto,shixuan-fan/presto,mvp/presto,xiangel/presto,Zoomdata/presto,springning/presto,zofuthan/presto,zjshen/presto,haozhun/presto,fengshao0907/presto,lingochamp/presto,jiangyifangh/presto,avasilevskiy/presto,mpilman/presto,smartpcr/presto,springning/presto,svstanev/presto,prateek1306/presto,mbeitchman/presto,ArturGajowy/presto,dabaitu/presto,fengshao0907/presto,harunurhan/presto,geraint0923/presto,pnowojski/presto,Jimexist/presto,jxiang/presto,sunchao/presto,kuzemchik/presto,lingochamp/presto,prateek1306/presto,RobinUS2/presto,rockerbox/presto,RobinUS2/presto,mpilman/presto,Nasdaq/presto,wagnermarkd/presto,miniway/presto,fiedukow/presto,kingland/presto,ptkool/presto,jxiang/presto,albertocsm/presto,youngwookim/presto,nvoron23/presto,kined/presto,11xor6/presto,ptkool/presto,nezihyigitbasi/presto,miniway/presto,kuzemchik/presto,lingochamp/presto,nakajijiji/presto,mattyb149/presto,pnowojski/presto,zhenyuy-fb/presto,kined/presto,svstanev/presto,kaschaeffer/presto,lingochamp/presto,suyucs/presto,nakajijiji/presto,ocono-tech/presto,albertocsm/presto,mugglmenzel/presto,shubham166/presto,wagnermarkd/presto,electrum/presto,propene/presto,raghavsethi/presto,Praveen2112/presto,aglne/presto,ebyhr/presto,cosinequanon/presto,facebook/presto,wagnermarkd/presto,ocono-tech/presto,ajoabraham/presto,Myrthan/presto,Yaliang/presto,TeradataCenterForHadoop/bootcamp,erichwang/presto,kietly/presto,smartnews/presto,fiedukow/presto,zofuthan/presto,siddhartharay007/presto,hulu/presto,aglne/presto,stewartpark/presto,losipiuk/presto,nezihyigitbasi/presto,takari/presto,rockerbox/presto,takari/presto,fiedukow/presto,gh351135612/presto,joshk/presto,zhenyuy-fb/presto,y-lan/presto,avasilevskiy/presto,pnowojski/presto,smartnews/p
resto,nileema/presto,joshk/presto,zjshen/presto,yu-yamada/presto,nvoron23/presto,kingland/presto,arhimondr/presto,dongjoon-hyun/presto,wyukawa/presto,nileema/presto,Zoomdata/presto,chrisunder/presto,svstanev/presto,nsabharwal/presto,ipros-team/presto,mono-plane/presto,EvilMcJerkface/presto,wangcan2014/presto,kietly/presto,facebook/presto,cawallin/presto,youngwookim/presto,Myrthan/presto,jxiang/presto,aramesh117/presto,fengshao0907/presto,Teradata/presto,arhimondr/presto,losipiuk/presto,suyucs/presto,Myrthan/presto,dongjoon-hyun/presto,suyucs/presto,ajoabraham/presto,yuananf/presto,arhimondr/presto,deciament/presto,smartnews/presto,aleph-zero/presto,gh351135612/presto,shixuan-fan/presto,TeradataCenterForHadoop/bootcamp,mattyb149/presto,nezihyigitbasi/presto,aleph-zero/presto,rockerbox/presto,sunchao/presto,siddhartharay007/presto,xiangel/presto,hgschmie/presto,chrisunder/presto,mcanthony/presto,CHINA-JD/presto,wangcan2014/presto,mandusm/presto,mattyb149/presto,Svjard/presto,tellproject/presto,totticarter/presto,nileema/presto,dabaitu/presto,Zoomdata/presto,smartpcr/presto,toxeh/presto,kuzemchik/presto,Svjard/presto,hulu/presto,zhenxiao/presto,troels/nz-presto,damiencarol/presto,vermaravikant/presto,shubham166/presto,prestodb/presto,CHINA-JD/presto,jiekechoo/presto,aramesh117/presto,mvp/presto,nvoron23/presto,cawallin/presto,zzhao0/presto,mbeitchman/presto,Zoomdata/presto,Praveen2112/presto,damiencarol/presto,cawallin/presto,Jimexist/presto,ebyhr/presto,TeradataCenterForHadoop/bootcamp,propene/presto,miquelruiz/presto,denizdemir/presto,zhenyuy-fb/presto,mugglmenzel/presto,sumanth232/presto,nezihyigitbasi/presto,Nasdaq/presto,electrum/presto,Teradata/presto,joshk/presto,idemura/presto,fipar/presto,jacobgao/presto,kined/presto,mugglmenzel/presto,pnowojski/presto,aramesh117/presto,zhenxiao/presto,toxeh/presto,kuzemchik/presto,wrmsr/presto,erichwang/presto,XiaominZhang/presto,pwz3n0/presto,soz-fb/presto,saidalaoui/presto,mbeitchman/presto,zofuthan/presto,Jimexist/presto,l
ingochamp/presto,springning/presto,smartnews/presto,mpilman/presto,wrmsr/presto,deciament/presto,Zoomdata/presto,prestodb/presto,martint/presto,shixuan-fan/presto,soz-fb/presto,wyukawa/presto,vermaravikant/presto,mattyb149/presto,cawallin/presto,gh351135612/presto,miniway/presto,denizdemir/presto,soz-fb/presto,jiekechoo/presto,haitaoyao/presto,martint/presto,zzhao0/presto,haozhun/presto,youngwookim/presto,totticarter/presto,haitaoyao/presto,wrmsr/presto,RobinUS2/presto,zzhao0/presto,fiedukow/presto,nakajijiji/presto,jiangyifangh/presto,deciament/presto,raghavsethi/presto,denizdemir/presto,kaschaeffer/presto,fipar/presto,wyukawa/presto,XiaominZhang/presto,jiangyifangh/presto,vermaravikant/presto,toyama0919/presto,albertocsm/presto,ptkool/presto,RobinUS2/presto,fipar/presto,sumanth232/presto,saidalaoui/presto,zhenxiao/presto,twitter-forks/presto,mvp/presto,tomz/presto,twitter-forks/presto,DanielTing/presto,propene/presto,kuzemchik/presto,RobinUS2/presto,nezihyigitbasi/presto,haitaoyao/presto,XiaominZhang/presto,Praveen2112/presto,ipros-team/presto,kaschaeffer/presto,geraint0923/presto,cosinequanon/presto,dabaitu/presto,losipiuk/presto,ptkool/presto,mode/presto,damiencarol/presto,xiangel/presto,dain/presto,joy-yao/presto,hgschmie/presto,tellproject/presto,bloomberg/presto,damiencarol/presto,prestodb/presto,nsabharwal/presto,sopel39/presto,sopel39/presto,sumanth232/presto,gcnonato/presto,cosinequanon/presto,DanielTing/presto,zzhao0/presto,totticarter/presto,jf367/presto,ajoabraham/presto,martint/presto,sopel39/presto,suyucs/presto,toxeh/presto,kaschaeffer/presto,jacobgao/presto,Myrthan/presto,toxeh/presto,prestodb/presto,nsabharwal/presto,hgschmie/presto,mono-plane/presto,kietly/presto,Teradata/presto,cberner/presto,soz-fb/presto,harunurhan/presto,erichwang/presto,mbeitchman/presto,Nasdaq/presto,toyama0919/presto,albertocsm/presto,yu-yamada/presto,pwz3n0/presto,11xor6/presto,kietly/presto,CHINA-JD/presto,mono-plane/presto,ebd2/presto,cberner/presto,mpilman/presto,pratee
k1306/presto,jekey/presto,jiekechoo/presto,zjshen/presto,ebyhr/presto,cosinequanon/presto,vermaravikant/presto,joy-yao/presto,ocono-tech/presto,joy-yao/presto,youngwookim/presto,svstanev/presto,ebyhr/presto,stewartpark/presto,toxeh/presto,wrmsr/presto,aramesh117/presto,Yaliang/presto,takari/presto,electrum/presto,jekey/presto,sumitkgec/presto,treasure-data/presto,hulu/presto,saidalaoui/presto,Svjard/presto,jiekechoo/presto,fengshao0907/presto,mandusm/presto,xiangel/presto,jf367/presto,fengshao0907/presto,treasure-data/presto,wangcan2014/presto,miniway/presto,Teradata/presto,sunchao/presto,mbeitchman/presto,miquelruiz/presto,ipros-team/presto,jekey/presto,sumitkgec/presto,albertocsm/presto,gcnonato/presto,vermaravikant/presto,toyama0919/presto,dain/presto,damiencarol/presto,11xor6/presto,chrisunder/presto,jf367/presto,tellproject/presto,harunurhan/presto,cberner/presto,nakajijiji/presto,nvoron23/presto,yu-yamada/presto,miquelruiz/presto,DanielTing/presto,mvp/presto,denizdemir/presto,mandusm/presto,EvilMcJerkface/presto,wrmsr/presto,kined/presto,suyucs/presto,youngwookim/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.failureDetector;
import com.facebook.presto.util.IterableTransformer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import io.airlift.discovery.client.ServiceDescriptor;
import io.airlift.discovery.client.ServiceSelector;
import io.airlift.discovery.client.ServiceType;
import io.airlift.http.client.HttpClient;
import io.airlift.http.client.Request;
import io.airlift.http.client.Response;
import io.airlift.http.client.ResponseHandler;
import io.airlift.log.Logger;
import io.airlift.node.NodeInfo;
import io.airlift.stats.DecayCounter;
import io.airlift.stats.ExponentialDecay;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.weakref.jmx.Managed;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.compose;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.http.client.Request.Builder.prepareHead;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
/**
 * Detects failed Presto nodes by sending a periodic HTTP HEAD "heartbeat" to every
 * other "presto" service announced in discovery. A service counts as failed while
 * its recent (exponentially decayed) failure ratio is above the configured
 * threshold, and remains failed until it has stayed below the threshold for the
 * whole warmup interval.
 */
public class HeartbeatFailureDetector
        implements FailureDetector
{
    private static final Logger log = Logger.get(HeartbeatFailureDetector.class);

    private final ServiceSelector selector;
    private final HttpClient httpClient;
    private final NodeInfo nodeInfo;

    // single daemon thread shared by the discovery-refresh task and all per-service pings
    private final ScheduledExecutorService executor = newSingleThreadScheduledExecutor(daemonThreadsNamed("failure-detector"));

    // monitoring tasks by service id
    private final ConcurrentMap<UUID, MonitoringTask> tasks = new ConcurrentHashMap<>();

    private final double failureRatioThreshold;
    private final Duration heartbeat;
    private final boolean isEnabled;
    private final Duration warmupInterval;
    // how long a disabled (offline) task is kept before being garbage-collected
    private final Duration gcGraceInterval;

    // guards against scheduling the refresh task more than once
    private final AtomicBoolean started = new AtomicBoolean();

    @Inject
    public HeartbeatFailureDetector(
            @ServiceType("presto") ServiceSelector selector,
            @ForFailureDetector HttpClient httpClient,
            FailureDetectorConfig config,
            NodeInfo nodeInfo)
    {
        checkNotNull(selector, "selector is null");
        checkNotNull(httpClient, "httpClient is null");
        checkNotNull(nodeInfo, "nodeInfo is null");
        checkNotNull(config, "config is null");
        checkArgument(config.getHeartbeatInterval().toMillis() >= 1, "heartbeat interval must be >= 1ms");

        this.selector = selector;
        this.httpClient = httpClient;
        this.nodeInfo = nodeInfo;

        this.failureRatioThreshold = config.getFailureRatioThreshold();
        this.heartbeat = config.getHeartbeatInterval();
        this.warmupInterval = config.getWarmupInterval();
        this.gcGraceInterval = config.getExpirationGraceInterval();

        this.isEnabled = config.isEnabled();
    }

    /**
     * Starts the detector (if enabled): schedules a task that refreshes the set of
     * monitored services from discovery every 5 seconds. Idempotent.
     */
    @PostConstruct
    public void start()
    {
        if (isEnabled && started.compareAndSet(false, true)) {
            executor.scheduleWithFixedDelay(new Runnable()
            {
                @Override
                public void run()
                {
                    try {
                        updateMonitoredServices();
                    }
                    catch (Throwable e) {
                        // ignore to avoid getting unscheduled
                        log.warn(e, "Error updating services");
                    }
                }
            }, 0, 5, TimeUnit.SECONDS);
        }
    }

    @PreDestroy
    public void shutdown()
    {
        executor.shutdownNow();
    }

    /**
     * @return descriptors of all monitored services currently considered failed
     */
    @Override
    public Set<ServiceDescriptor> getFailed()
    {
        return IterableTransformer.on(tasks.values())
                .select(MonitoringTask::isFailed)
                .transform(MonitoringTask::getService)
                .set();
    }

    @Managed(description = "Number of failed services")
    public int getFailedCount()
    {
        return getFailed().size();
    }

    @Managed(description = "Total number of known services")
    public int getTotalCount()
    {
        return tasks.size();
    }

    @Managed
    public int getActiveCount()
    {
        return tasks.size() - getFailed().size();
    }

    /**
     * @return heartbeat statistics for every monitored service
     */
    public Map<ServiceDescriptor, Stats> getStats()
    {
        ImmutableMap.Builder<ServiceDescriptor, Stats> builder = ImmutableMap.builder();
        for (MonitoringTask task : tasks.values()) {
            builder.put(task.getService(), task.getStats());
        }
        return builder.build();
    }

    /**
     * Reconciles the monitoring tasks with the current discovery state: removes
     * expired tasks, disables tasks for services that went offline, creates tasks
     * for new services, and (re-)enables tasks for all online services. The local
     * node itself is never monitored.
     */
    @VisibleForTesting
    void updateMonitoredServices()
    {
        Set<ServiceDescriptor> online = IterableTransformer.on(selector.selectAllServices())
                .select(not(descriptor -> nodeInfo.getNodeId().equals(descriptor.getNodeId())))
                .set();

        Set<UUID> onlineIds = IterableTransformer.on(online)
                .transform(ServiceDescriptor::getId)
                .set();

        // make sure only one thread is updating the registrations
        synchronized (tasks) {
            // 1. remove expired tasks
            List<UUID> expiredIds = IterableTransformer.on(tasks.values())
                    .select(MonitoringTask::isExpired)
                    .transform(MonitoringTask::getService)
                    .transform(ServiceDescriptor::getId)
                    .list();

            tasks.keySet().removeAll(expiredIds);

            // 2. disable offline services
            Iterable<MonitoringTask> toDisable = IterableTransformer.on(tasks.values())
                    .select(compose(not(in(onlineIds)), task -> task.getService().getId()))
                    .all();

            for (MonitoringTask task : toDisable) {
                task.disable();
            }

            // 3. create tasks for new services
            Set<ServiceDescriptor> newServices = IterableTransformer.on(online)
                    .select(compose(not(in(tasks.keySet())), ServiceDescriptor::getId))
                    .set();

            for (ServiceDescriptor service : newServices) {
                URI uri = getHttpUri(service);
                if (uri != null) {
                    tasks.put(service.getId(), new MonitoringTask(service, uri));
                }
            }

            // 4. enable all online tasks (existing plus newly created)
            Iterable<MonitoringTask> toEnable = IterableTransformer.on(tasks.values())
                    .select(compose(in(onlineIds), task -> task.getService().getId()))
                    .all();

            for (MonitoringTask task : toEnable) {
                task.enable();
            }
        }
    }

    /**
     * Extracts the service's "http" property as a URI, or returns null when the
     * property is absent or not a valid URI.
     */
    private static URI getHttpUri(ServiceDescriptor service)
    {
        try {
            String uri = service.getProperties().get("http");
            if (uri != null) {
                return new URI(uri);
            }
        }
        catch (URISyntaxException e) {
            // ignore, not a valid http uri
        }
        return null;
    }

    /**
     * Monitors a single remote service: periodically pings its http URI and keeps
     * heartbeat statistics plus the timestamp of the last over/under-threshold
     * transition, from which the failed/alive verdict is derived.
     */
    @ThreadSafe
    private class MonitoringTask
    {
        private final ServiceDescriptor service;
        private final URI uri;
        private final Stats stats;

        @GuardedBy("this")
        private ScheduledFuture<?> future;

        // when the task was disabled (nanos); drives expiration via gcGraceInterval
        @GuardedBy("this")
        private Long disabledTimestamp;

        // when the failure ratio last dropped below the threshold (nanos);
        // null while the service is above the threshold
        @GuardedBy("this")
        private Long successTransitionTimestamp;

        private MonitoringTask(ServiceDescriptor service, URI uri)
        {
            this.uri = uri;
            this.service = service;
            this.stats = new Stats(uri);
        }

        public Stats getStats()
        {
            return stats;
        }

        public ServiceDescriptor getService()
        {
            return service;
        }

        /**
         * Schedules the periodic ping if not already running; clears the
         * disabled timestamp so the task is no longer a candidate for expiration.
         */
        public synchronized void enable()
        {
            if (future == null) {
                future = executor.scheduleAtFixedRate(new Runnable()
                {
                    @Override
                    public void run()
                    {
                        try {
                            ping();
                            updateState();
                        }
                        catch (Throwable e) {
                            // ignore to avoid getting unscheduled
                            log.warn(e, "Error pinging service %s (%s)", service.getId(), uri);
                        }
                    }
                }, heartbeat.toMillis(), heartbeat.toMillis(), TimeUnit.MILLISECONDS);
                disabledTimestamp = null;
            }
        }

        /**
         * Cancels the periodic ping and records when the task was disabled,
         * starting the expiration grace period.
         */
        public synchronized void disable()
        {
            if (future != null) {
                future.cancel(true);
                future = null;
                disabledTimestamp = System.nanoTime();
            }
        }

        public synchronized boolean isExpired()
        {
            return future == null && disabledTimestamp != null && Duration.nanosSince(disabledTimestamp).compareTo(gcGraceInterval) > 0;
        }

        public synchronized boolean isFailed()
        {
            return future == null || // are we disabled?
                    successTransitionTimestamp == null || // are we in success state?
                    Duration.nanosSince(successTransitionTimestamp).compareTo(warmupInterval) < 0; // are we within the warmup period?
        }

        /**
         * Fires one asynchronous HEAD request at the service and records the
         * outcome in {@link Stats}; never blocks on the response.
         */
        private void ping()
        {
            try {
                stats.recordStart();
                httpClient.executeAsync(prepareHead().setUri(uri).build(), new ResponseHandler<Object, Exception>()
                {
                    @Override
                    public Exception handleException(Request request, Exception exception)
                    {
                        // ignore error
                        stats.recordFailure(exception);

                        // TODO: this will technically cause an NPE in httpClient, but it's not triggered because
                        // we never call get() on the response future. This behavior needs to be fixed in airlift
                        return null;
                    }

                    @Override
                    public Object handle(Request request, Response response)
                            throws Exception
                    {
                        stats.recordSuccess();
                        return null;
                    }
                });
            }
            catch (RuntimeException e) {
                log.warn(e, "Error scheduling request for %s", uri);
            }
        }

        /**
         * Records over/under-threshold transitions of the recent failure ratio;
         * the timestamp of the last transition to "under" feeds the warmup check
         * in {@link #isFailed()}.
         */
        private synchronized void updateState()
        {
            // is this an over/under transition?
            if (stats.getRecentFailureRatio() > failureRatioThreshold) {
                successTransitionTimestamp = null;
            }
            else if (successTransitionTimestamp == null) {
                successTransitionTimestamp = System.nanoTime();
            }
        }
    }

    /**
     * Heartbeat statistics for one monitored URI: exponentially decayed counters of
     * requests, successes and failures (one-minute decay), last request/response
     * times, and a per-exception-type failure breakdown. Exposed via JSON.
     */
    public static class Stats
    {
        private final long start = System.nanoTime();
        private final URI uri;

        private final DecayCounter recentRequests = new DecayCounter(ExponentialDecay.oneMinute());
        private final DecayCounter recentFailures = new DecayCounter(ExponentialDecay.oneMinute());
        private final DecayCounter recentSuccesses = new DecayCounter(ExponentialDecay.oneMinute());
        private final AtomicReference<DateTime> lastRequestTime = new AtomicReference<>();
        private final AtomicReference<DateTime> lastResponseTime = new AtomicReference<>();

        @GuardedBy("this")
        private final Map<Class<? extends Throwable>, DecayCounter> failureCountByType = new HashMap<>();

        public Stats(URI uri)
        {
            this.uri = uri;
        }

        public void recordStart()
        {
            recentRequests.add(1);
            lastRequestTime.set(new DateTime());
        }

        public void recordSuccess()
        {
            recentSuccesses.add(1);
            lastResponseTime.set(new DateTime());
        }

        public void recordFailure(Exception exception)
        {
            recentFailures.add(1);
            lastResponseTime.set(new DateTime());

            // unwrap bare RuntimeException wrappers so failures are categorized
            // by their underlying cause
            Throwable cause = exception;
            while (cause.getClass() == RuntimeException.class && cause.getCause() != null) {
                cause = cause.getCause();
            }

            synchronized (this) {
                DecayCounter counter = failureCountByType.get(cause.getClass());
                if (counter == null) {
                    counter = new DecayCounter(ExponentialDecay.oneMinute());
                    failureCountByType.put(cause.getClass(), counter);
                }
                counter.add(1);
            }
        }

        @JsonProperty
        public Duration getAge()
        {
            return Duration.nanosSince(start);
        }

        @JsonProperty
        public URI getUri()
        {
            return uri;
        }

        @JsonProperty
        public double getRecentFailures()
        {
            return recentFailures.getCount();
        }

        @JsonProperty
        public double getRecentSuccesses()
        {
            return recentSuccesses.getCount();
        }

        @JsonProperty
        public double getRecentRequests()
        {
            return recentRequests.getCount();
        }

        @JsonProperty
        public double getRecentFailureRatio()
        {
            return recentFailures.getCount() / recentRequests.getCount();
        }

        @JsonProperty
        public DateTime getLastRequestTime()
        {
            return lastRequestTime.get();
        }

        @JsonProperty
        public DateTime getLastResponseTime()
        {
            return lastResponseTime.get();
        }

        @JsonProperty
        public synchronized Map<String, Double> getRecentFailuresByType()
        {
            ImmutableMap.Builder<String, Double> builder = ImmutableMap.builder();
            for (Map.Entry<Class<? extends Throwable>, DecayCounter> entry : failureCountByType.entrySet()) {
                builder.put(entry.getKey().getName(), entry.getValue().getCount());
            }
            return builder.build();
        }
    }
}
| presto-main/src/main/java/com/facebook/presto/failureDetector/HeartbeatFailureDetector.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.failureDetector;
import com.facebook.presto.util.IterableTransformer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import io.airlift.discovery.client.ServiceDescriptor;
import io.airlift.discovery.client.ServiceSelector;
import io.airlift.discovery.client.ServiceType;
import io.airlift.http.client.HttpClient;
import io.airlift.http.client.Request;
import io.airlift.http.client.Response;
import io.airlift.http.client.ResponseHandler;
import io.airlift.log.Logger;
import io.airlift.node.NodeInfo;
import io.airlift.stats.DecayCounter;
import io.airlift.stats.ExponentialDecay;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.weakref.jmx.Managed;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.compose;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.http.client.Request.Builder.prepareHead;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
public class HeartbeatFailureDetector
implements FailureDetector
{
private static final Logger log = Logger.get(HeartbeatFailureDetector.class);
private final ServiceSelector selector;
private final HttpClient httpClient;
private final NodeInfo nodeInfo;
private final ScheduledExecutorService executor = newSingleThreadScheduledExecutor(daemonThreadsNamed("failure-detector"));
// monitoring tasks by service id
private final ConcurrentMap<UUID, MonitoringTask> tasks = new ConcurrentHashMap<>();
private final double failureRatioThreshold;
private final Duration heartbeat;
private final boolean isEnabled;
private final Duration warmupInterval;
private final Duration gcGraceInterval;
private final AtomicBoolean started = new AtomicBoolean();
@Inject
public HeartbeatFailureDetector(
@ServiceType("presto") ServiceSelector selector,
@ForFailureDetector HttpClient httpClient,
FailureDetectorConfig config,
NodeInfo nodeInfo)
{
checkNotNull(selector, "selector is null");
checkNotNull(httpClient, "httpClient is null");
checkNotNull(nodeInfo, "nodeInfo is null");
checkNotNull(config, "config is null");
checkArgument(config.getHeartbeatInterval().toMillis() >= 1, "heartbeat interval must be >= 1ms");
this.selector = selector;
this.httpClient = httpClient;
this.nodeInfo = nodeInfo;
this.failureRatioThreshold = config.getFailureRatioThreshold();
this.heartbeat = config.getHeartbeatInterval();
this.warmupInterval = config.getWarmupInterval();
this.gcGraceInterval = config.getExpirationGraceInterval();
this.isEnabled = config.isEnabled();
}
@PostConstruct
public void start()
{
if (isEnabled && started.compareAndSet(false, true)) {
executor.scheduleWithFixedDelay(new Runnable()
{
@Override
public void run()
{
try {
updateMonitoredServices();
}
catch (Throwable e) {
// ignore to avoid getting unscheduled
log.warn(e, "Error updating services");
}
}
}, 0, 5, TimeUnit.SECONDS);
}
}
@PreDestroy
public void shutdown()
{
executor.shutdownNow();
}
@Override
public Set<ServiceDescriptor> getFailed()
{
return IterableTransformer.on(tasks.values())
.select(isFailedPredicate())
.transform(serviceGetter())
.set();
}
@Managed(description = "Number of failed services")
public int getFailedCount()
{
return getFailed().size();
}
@Managed(description = "Total number of known services")
public int getTotalCount()
{
return tasks.size();
}
@Managed
public int getActiveCount()
{
return tasks.size() - getFailed().size();
}
public Map<ServiceDescriptor, Stats> getStats()
{
ImmutableMap.Builder<ServiceDescriptor, Stats> builder = ImmutableMap.builder();
for (MonitoringTask task : tasks.values()) {
builder.put(task.getService(), task.getStats());
}
return builder.build();
}
@VisibleForTesting
void updateMonitoredServices()
{
Set<ServiceDescriptor> online = IterableTransformer.on(selector.selectAllServices())
.select(not(serviceDescriptorHasNodeId(nodeInfo.getNodeId())))
.set();
Set<UUID> onlineIds = IterableTransformer.on(online)
.transform(idGetter())
.set();
// make sure only one thread is updating the registrations
synchronized (tasks) {
// 1. remove expired tasks
List<UUID> expiredIds = IterableTransformer.on(tasks.values())
.select(isExpiredPredicate())
.transform(serviceIdGetter())
.list();
tasks.keySet().removeAll(expiredIds);
// 2. disable offline services
Iterable<MonitoringTask> toDisable = IterableTransformer.on(tasks.values())
.select(compose(not(in(onlineIds)), serviceIdGetter()))
.all();
for (MonitoringTask task : toDisable) {
task.disable();
}
// 3. create tasks for new services
Set<ServiceDescriptor> newServices = IterableTransformer.on(online)
.select(compose(not(in(tasks.keySet())), idGetter()))
.set();
for (ServiceDescriptor service : newServices) {
URI uri = getHttpUri(service);
if (uri != null) {
tasks.put(service.getId(), new MonitoringTask(service, uri));
}
}
// 4. enable all online tasks (existing plus newly created)
Iterable<MonitoringTask> toEnable = IterableTransformer.on(tasks.values())
.select(compose(in(onlineIds), serviceIdGetter()))
.all();
for (MonitoringTask task : toEnable) {
task.enable();
}
}
}
private static URI getHttpUri(ServiceDescriptor service)
{
try {
String uri = service.getProperties().get("http");
if (uri != null) {
return new URI(uri);
}
}
catch (URISyntaxException e) {
// ignore, not a valid http uri
}
return null;
}
private static Predicate<ServiceDescriptor> serviceDescriptorHasNodeId(final String nodeId)
{
checkNotNull(nodeId, "nodeId is null");
return new Predicate<ServiceDescriptor>()
{
@Override
public boolean apply(ServiceDescriptor descriptor)
{
return nodeId.equals(descriptor.getNodeId());
}
};
}
private static Function<ServiceDescriptor, UUID> idGetter()
{
return new Function<ServiceDescriptor, UUID>()
{
@Override
public UUID apply(ServiceDescriptor descriptor)
{
return descriptor.getId();
}
};
}
private static Function<MonitoringTask, ServiceDescriptor> serviceGetter()
{
return new Function<MonitoringTask, ServiceDescriptor>()
{
@Override
public ServiceDescriptor apply(MonitoringTask task)
{
return task.getService();
}
};
}
private static Function<MonitoringTask, UUID> serviceIdGetter()
{
return new Function<MonitoringTask, UUID>()
{
@Override
public UUID apply(MonitoringTask task)
{
return task.getService().getId();
}
};
}
private static Predicate<MonitoringTask> isExpiredPredicate()
{
return new Predicate<MonitoringTask>()
{
@Override
public boolean apply(MonitoringTask task)
{
return task.isExpired();
}
};
}
private static Predicate<MonitoringTask> isFailedPredicate()
{
return new Predicate<MonitoringTask>()
{
@Override
public boolean apply(MonitoringTask task)
{
return task.isFailed();
}
};
}
@ThreadSafe
private class MonitoringTask
{
private final ServiceDescriptor service;
private final URI uri;
private final Stats stats;
@GuardedBy("this")
private ScheduledFuture<?> future;
@GuardedBy("this")
private Long disabledTimestamp;
@GuardedBy("this")
private Long successTransitionTimestamp;
private MonitoringTask(ServiceDescriptor service, URI uri)
{
this.uri = uri;
this.service = service;
this.stats = new Stats(uri);
}
public Stats getStats()
{
return stats;
}
public ServiceDescriptor getService()
{
return service;
}
public synchronized void enable()
{
if (future == null) {
future = executor.scheduleAtFixedRate(new Runnable()
{
@Override
public void run()
{
try {
ping();
updateState();
}
catch (Throwable e) {
// ignore to avoid getting unscheduled
log.warn(e, "Error pinging service %s (%s)", service.getId(), uri);
}
}
}, heartbeat.toMillis(), heartbeat.toMillis(), TimeUnit.MILLISECONDS);
disabledTimestamp = null;
}
}
public synchronized void disable()
{
if (future != null) {
future.cancel(true);
future = null;
disabledTimestamp = System.nanoTime();
}
}
public synchronized boolean isExpired()
{
return future == null && disabledTimestamp != null && Duration.nanosSince(disabledTimestamp).compareTo(gcGraceInterval) > 0;
}
public synchronized boolean isFailed()
{
return future == null || // are we disabled?
successTransitionTimestamp == null || // are we in success state?
Duration.nanosSince(successTransitionTimestamp).compareTo(warmupInterval) < 0; // are we within the warmup period?
}
private void ping()
{
try {
stats.recordStart();
httpClient.executeAsync(prepareHead().setUri(uri).build(), new ResponseHandler<Object, Exception>()
{
@Override
public Exception handleException(Request request, Exception exception)
{
// ignore error
stats.recordFailure(exception);
// TODO: this will technically cause an NPE in httpClient, but it's not triggered because
// we never call get() on the response future. This behavior needs to be fixed in airlift
return null;
}
@Override
public Object handle(Request request, Response response)
throws Exception
{
stats.recordSuccess();
return null;
}
});
}
catch (RuntimeException e) {
log.warn(e, "Error scheduling request for %s", uri);
}
}
private synchronized void updateState()
{
// is this an over/under transition?
if (stats.getRecentFailureRatio() > failureRatioThreshold) {
successTransitionTimestamp = null;
}
else if (successTransitionTimestamp == null) {
successTransitionTimestamp = System.nanoTime();
}
}
}
public static class Stats
{
private final long start = System.nanoTime();
private final URI uri;
private final DecayCounter recentRequests = new DecayCounter(ExponentialDecay.oneMinute());
private final DecayCounter recentFailures = new DecayCounter(ExponentialDecay.oneMinute());
private final DecayCounter recentSuccesses = new DecayCounter(ExponentialDecay.oneMinute());
private final AtomicReference<DateTime> lastRequestTime = new AtomicReference<>();
private final AtomicReference<DateTime> lastResponseTime = new AtomicReference<>();
@GuardedBy("this")
private final Map<Class<? extends Throwable>, DecayCounter> failureCountByType = new HashMap<>();
public Stats(URI uri)
{
this.uri = uri;
}
public void recordStart()
{
recentRequests.add(1);
lastRequestTime.set(new DateTime());
}
public void recordSuccess()
{
recentSuccesses.add(1);
lastResponseTime.set(new DateTime());
}
public void recordFailure(Exception exception)
{
recentFailures.add(1);
lastResponseTime.set(new DateTime());
Throwable cause = exception;
while (cause.getClass() == RuntimeException.class && cause.getCause() != null) {
cause = cause.getCause();
}
synchronized (this) {
DecayCounter counter = failureCountByType.get(cause.getClass());
if (counter == null) {
counter = new DecayCounter(ExponentialDecay.oneMinute());
failureCountByType.put(cause.getClass(), counter);
}
counter.add(1);
}
}
@JsonProperty
public Duration getAge()
{
return Duration.nanosSince(start);
}
@JsonProperty
public URI getUri()
{
return uri;
}
@JsonProperty
public double getRecentFailures()
{
return recentFailures.getCount();
}
@JsonProperty
public double getRecentSuccesses()
{
return recentSuccesses.getCount();
}
@JsonProperty
public double getRecentRequests()
{
return recentRequests.getCount();
}
@JsonProperty
public double getRecentFailureRatio()
{
return recentFailures.getCount() / recentRequests.getCount();
}
@JsonProperty
public DateTime getLastRequestTime()
{
return lastRequestTime.get();
}
@JsonProperty
public DateTime getLastResponseTime()
{
return lastResponseTime.get();
}
@JsonProperty
public synchronized Map<String, Double> getRecentFailuresByType()
{
ImmutableMap.Builder<String, Double> builder = ImmutableMap.builder();
for (Map.Entry<Class<? extends Throwable>, DecayCounter> entry : failureCountByType.entrySet()) {
builder.put(entry.getKey().getName(), entry.getValue().getCount());
}
return builder.build();
}
}
}
| Remove getter/predicate functions from HeartbeatFailureDetector
| presto-main/src/main/java/com/facebook/presto/failureDetector/HeartbeatFailureDetector.java | Remove getter/predicate functions from HeartbeatFailureDetector |
|
Java | apache-2.0 | 9fd513d268679005d6430337ed8595a08727c1c2 | 0 | openedbox/feign,spencergibb/feign,Netflix/feign,spencergibb/feign,zhoupan/feign,olkulyk/feign,bstick12/feign,OpenFeign/feign,jmnarloch/feign,bstick12/feign,openedbox/feign,Echo19890615/feign,didvae/feign,zhoupan/feign,gorcz/feign,OpenFeign/feign,ddteeter/feign,OpenFeign/feign,FrEaKmAn/feign,didvae/feign,olkulyk/feign,Netflix/feign,prembasumatary/feign,prembasumatary/feign,gorcz/feign,ddteeter/feign,Echo19890615/feign,FrEaKmAn/feign,jmnarloch/feign | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package feign;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.logging.FileHandler;
import java.util.logging.LogRecord;
import java.util.logging.SimpleFormatter;
import static feign.Util.UTF_8;
import static feign.Util.ensureClosed;
import static feign.Util.valuesOrEmpty;
/**
* Simple logging abstraction for debug messages. Adapted from {@code retrofit.RestAdapter.Log}.
*/
public abstract class Logger {
/**
* Controls the level of logging.
*/
public enum Level {
/**
* No logging.
*/
NONE,
/**
* Log only the request method and URL and the response status code and execution time.
*/
BASIC,
/**
* Log the basic information along with request and response headers.
*/
HEADERS,
/**
* Log the headers, body, and metadata for both requests and responses.
*/
FULL
}
/**
* logs to the category {@link Logger} at {@link java.util.logging.Level#FINE}.
*/
public static class ErrorLogger extends Logger {
final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(Logger.class.getName());
@Override protected void log(Target<?> target, String format, Object... args) {
System.err.printf(format + "%n", args);
}
}
/**
* logs to the category {@link Logger} at {@link java.util.logging.Level#FINE}, if loggable.
*/
public static class JavaLogger extends Logger {
final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(Logger.class.getName());
@Override void logRequest(Target<?> target, Level logLevel, Request request) {
if (logger.isLoggable(java.util.logging.Level.FINE)) {
super.logRequest(target, logLevel, request);
}
}
@Override
Response logAndRebufferResponse(Target<?> target, Level logLevel, Response response, long elapsedTime) throws IOException {
if (logger.isLoggable(java.util.logging.Level.FINE)) {
return super.logAndRebufferResponse(target, logLevel, response, elapsedTime);
}
return response;
}
@Override protected void log(Target<?> target, String format, Object... args) {
logger.fine(String.format(format, args));
}
/**
* helper that configures jul to sanely log messages at FINE level without additional formatting.
*/
public JavaLogger appendToFile(String logfile) {
logger.setLevel(java.util.logging.Level.FINE);
try {
FileHandler handler = new FileHandler(logfile, true);
handler.setFormatter(new SimpleFormatter() {
@Override
public String format(LogRecord record) {
return String.format("%s%n", record.getMessage()); // NOPMD
}
});
logger.addHandler(handler);
} catch (IOException e) {
throw new IllegalStateException("Could not add file handler.", e);
}
return this;
}
}
public static class NoOpLogger extends Logger {
@Override void logRequest(Target<?> target, Level logLevel, Request request) {
}
@Override
Response logAndRebufferResponse(Target<?> target, Level logLevel, Response response, long elapsedTime) throws IOException {
return response;
}
@Override
protected void log(Target<?> target, String format, Object... args) {
}
}
/**
* Override to log requests and responses using your own implementation.
* Messages will be http request and response text.
*
* @param target useful if using MDC (Mapped Diagnostic Context) loggers
* @param format {@link java.util.Formatter format string}
* @param args arguments applied to {@code format}
*/
protected abstract void log(Target<?> target, String format, Object... args);
void logRequest(Target<?> target, Level logLevel, Request request) {
log(target, "---> %s %s HTTP/1.1", request.method(), request.url());
if (logLevel.ordinal() >= Level.HEADERS.ordinal()) {
for (String field : request.headers().keySet()) {
for (String value : valuesOrEmpty(request.headers(), field)) {
log(target, "%s: %s", field, value);
}
}
int bytes = 0;
if (request.body() != null) {
bytes = request.body().getBytes(UTF_8).length;
if (logLevel.ordinal() >= Level.FULL.ordinal()) {
log(target, ""); // CRLF
log(target, "%s", request.body());
}
}
log(target, "---> END HTTP (%s-byte body)", bytes);
}
}
Response logAndRebufferResponse(Target<?> target, Level logLevel, Response response, long elapsedTime) throws IOException {
log(target, "<--- HTTP/1.1 %s %s (%sms)", response.status(), response.reason(), elapsedTime);
if (logLevel.ordinal() >= Level.HEADERS.ordinal()) {
for (String field : response.headers().keySet()) {
for (String value : valuesOrEmpty(response.headers(), field)) {
log(target, "%s: %s", field, value);
}
}
if (response.body() != null) {
if (logLevel.ordinal() >= Level.FULL.ordinal()) {
log(target, ""); // CRLF
}
Reader body = response.body().asReader();
try {
StringBuilder buffered = new StringBuilder();
BufferedReader reader = new BufferedReader(body);
String line;
while ((line = reader.readLine()) != null) {
buffered.append(line);
if (logLevel.ordinal() >= Level.FULL.ordinal()) {
log(target, "%s", line);
}
}
String bodyAsString = buffered.toString();
log(target, "<--- END HTTP (%s-byte body)", bodyAsString.getBytes(UTF_8).length);
return Response.create(response.status(), response.reason(), response.headers(), bodyAsString);
} finally {
ensureClosed(response.body());
}
}
}
return response;
}
}
| feign-core/src/main/java/feign/Logger.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package feign;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.text.SimpleDateFormat;
import java.util.logging.FileHandler;
import java.util.logging.LogRecord;
import java.util.logging.SimpleFormatter;
import static feign.Util.UTF_8;
import static feign.Util.ensureClosed;
import static feign.Util.valuesOrEmpty;
/**
* Simple logging abstraction for debug messages. Adapted from {@code retrofit.RestAdapter.Log}.
*/
public abstract class Logger {
/**
* Controls the level of logging.
*/
public enum Level {
/**
* No logging.
*/
NONE,
/**
* Log only the request method and URL and the response status code and execution time.
*/
BASIC,
/**
* Log the basic information along with request and response headers.
*/
HEADERS,
/**
* Log the headers, body, and metadata for both requests and responses.
*/
FULL
}
/**
* logs to the category {@link Logger} at {@link java.util.logging.Level#FINE}.
*/
public static class ErrorLogger extends Logger {
final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(Logger.class.getName());
@Override protected void log(Target<?> target, String format, Object... args) {
System.err.printf(format + "%n", args);
}
}
/**
* logs to the category {@link Logger} at {@link java.util.logging.Level#FINE}, if loggable.
*/
public static class JavaLogger extends Logger {
final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(Logger.class.getName());
@Override void logRequest(Target<?> target, Level logLevel, Request request) {
if (logger.isLoggable(java.util.logging.Level.FINE)) {
super.logRequest(target, logLevel, request);
}
}
@Override
Response logAndRebufferResponse(Target<?> target, Level logLevel, Response response, long elapsedTime) throws IOException {
if (logger.isLoggable(java.util.logging.Level.FINE)) {
return super.logAndRebufferResponse(target, logLevel, response, elapsedTime);
}
return response;
}
@Override protected void log(Target<?> target, String format, Object... args) {
logger.fine(String.format(format, args));
}
/**
* helper that configures jul to sanely log messages.
*/
public JavaLogger appendToFile(String logfile) {
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
logger.setLevel(java.util.logging.Level.FINE);
try {
FileHandler handler = new FileHandler(logfile, true);
handler.setFormatter(new SimpleFormatter() {
@Override
public String format(LogRecord record) {
String timestamp = sdf.format(new java.util.Date(record.getMillis())); // NOPMD
return String.format("%s %s%n", timestamp, record.getMessage()); // NOPMD
}
});
logger.addHandler(handler);
} catch (IOException e) {
throw new IllegalStateException("Could not add file handler.", e);
}
return this;
}
}
public static class NoOpLogger extends Logger {
@Override void logRequest(Target<?> target, Level logLevel, Request request) {
}
@Override
Response logAndRebufferResponse(Target<?> target, Level logLevel, Response response, long elapsedTime) throws IOException {
return response;
}
@Override
protected void log(Target<?> target, String format, Object... args) {
}
}
/**
* Override to log requests and responses using your own implementation.
* Messages will be http request and response text.
*
* @param target useful if using MDC (Mapped Diagnostic Context) loggers
* @param format {@link java.util.Formatter format string}
* @param args arguments applied to {@code format}
*/
protected abstract void log(Target<?> target, String format, Object... args);
void logRequest(Target<?> target, Level logLevel, Request request) {
log(target, "---> %s %s HTTP/1.1", request.method(), request.url());
if (logLevel.ordinal() >= Level.HEADERS.ordinal()) {
for (String field : request.headers().keySet()) {
for (String value : valuesOrEmpty(request.headers(), field)) {
log(target, "%s: %s", field, value);
}
}
int bytes = 0;
if (request.body() != null) {
bytes = request.body().getBytes(UTF_8).length;
if (logLevel.ordinal() >= Level.FULL.ordinal()) {
log(target, ""); // CRLF
log(target, "%s", request.body());
}
}
log(target, "---> END HTTP (%s-byte body)", bytes);
}
}
Response logAndRebufferResponse(Target<?> target, Level logLevel, Response response, long elapsedTime) throws IOException {
log(target, "<--- HTTP/1.1 %s %s (%sms)", response.status(), response.reason(), elapsedTime);
if (logLevel.ordinal() >= Level.HEADERS.ordinal()) {
for (String field : response.headers().keySet()) {
for (String value : valuesOrEmpty(response.headers(), field)) {
log(target, "%s: %s", field, value);
}
}
if (response.body() != null) {
if (logLevel.ordinal() >= Level.FULL.ordinal()) {
log(target, ""); // CRLF
}
Reader body = response.body().asReader();
try {
StringBuilder buffered = new StringBuilder();
BufferedReader reader = new BufferedReader(body);
String line;
while ((line = reader.readLine()) != null) {
buffered.append(line);
if (logLevel.ordinal() >= Level.FULL.ordinal()) {
log(target, "%s", line);
}
}
String bodyAsString = buffered.toString();
log(target, "<--- END HTTP (%s-byte body)", bodyAsString.getBytes(UTF_8).length);
return Response.create(response.status(), response.reason(), response.headers(), bodyAsString);
} finally {
ensureClosed(response.body());
}
}
}
return response;
}
}
| remove timestamp from log appender helper
| feign-core/src/main/java/feign/Logger.java | remove timestamp from log appender helper |
|
Java | apache-2.0 | e5f4b46f3f93f4d87e11f896e1e3c2804c1681ea | 0 | sandeep-n/incubator-systemml,niketanpansare/incubator-systemml,dusenberrymw/incubator-systemml,deroneriksson/systemml,deroneriksson/incubator-systemml,asurve/arvind-sysml2,nakul02/systemml,asurve/incubator-systemml,asurve/systemml,niketanpansare/incubator-systemml,deroneriksson/incubator-systemml,asurve/arvind-sysml2,dusenberrymw/systemml,nakul02/incubator-systemml,iyounus/incubator-systemml,deroneriksson/incubator-systemml,apache/incubator-systemml,deroneriksson/systemml,dusenberrymw/incubator-systemml,nakul02/systemml,deroneriksson/systemml,akchinSTC/systemml,Myasuka/systemml,Myasuka/systemml,nakul02/incubator-systemml,nakul02/systemml,iyounus/incubator-systemml,asurve/incubator-systemml,Myasuka/systemml,deroneriksson/incubator-systemml,iyounus/incubator-systemml,niketanpansare/systemml,gweidner/systemml,iyounus/incubator-systemml,dusenberrymw/systemml,akchinSTC/systemml,niketanpansare/systemml,dusenberrymw/incubator-systemml,apache/incubator-systemml,Wenpei/incubator-systemml,dusenberrymw/incubator-systemml,asurve/systemml,dusenberrymw/systemml,apache/incubator-systemml,nakul02/systemml,gweidner/systemml,Wenpei/incubator-systemml,gweidner/incubator-systemml,niketanpansare/incubator-systemml,dhutchis/systemml,dusenberrymw/systemml,iyounus/incubator-systemml,apache/incubator-systemml,deroneriksson/systemml,asurve/arvind-sysml2,niketanpansare/systemml,dusenberrymw/incubator-systemml,nakul02/incubator-systemml,iyounus/incubator-systemml,dhutchis/systemml,nakul02/systemml,gweidner/incubator-systemml,gweidner/systemml,asurve/systemml,asurve/incubator-systemml,Wenpei/incubator-systemml,asurve/incubator-systemml,akchinSTC/systemml,fschueler/incubator-systemml,asurve/arvind-sysml2,dhutchis/systemml,deroneriksson/incubator-systemml,fschueler/incubator-systemml,gweidner/incubator-systemml,nakul02/incubator-systemml,apache/incubator-systemml,sandeep-n/incubator-systemml,akchinSTC/systemml,niketanpansare/syste
mml,asurve/arvind-sysml2,gweidner/incubator-systemml,asurve/systemml,asurve/incubator-systemml,deroneriksson/systemml,Wenpei/incubator-systemml,fschueler/incubator-systemml,asurve/systemml,gweidner/incubator-systemml,deroneriksson/incubator-systemml,dusenberrymw/systemml,dusenberrymw/systemml,nakul02/incubator-systemml,dhutchis/systemml,dusenberrymw/incubator-systemml,dhutchis/systemml,fschueler/incubator-systemml,nakul02/systemml,sandeep-n/incubator-systemml,gweidner/systemml,dhutchis/systemml,akchinSTC/systemml,gweidner/systemml,niketanpansare/systemml,gweidner/incubator-systemml,Myasuka/systemml,deroneriksson/systemml,asurve/systemml,gweidner/systemml,niketanpansare/systemml,niketanpansare/incubator-systemml,asurve/arvind-sysml2,sandeep-n/incubator-systemml,Myasuka/systemml,nakul02/incubator-systemml,akchinSTC/systemml,asurve/incubator-systemml,Myasuka/systemml,apache/incubator-systemml | /**
* IBM Confidential
* OCO Source Materials
* (C) Copyright IBM Corp. 2010, 2015
* The source code for this program is not published or otherwise divested of its trade secrets, irrespective of what has been deposited with the U.S. Copyright Office.
*/
package com.ibm.bi.dml.runtime.matrix.data;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.math3.random.Well1024a;
import org.apache.hadoop.io.DataInputBuffer;
import com.ibm.bi.dml.conf.ConfigurationManager;
import com.ibm.bi.dml.conf.DMLConfig;
import com.ibm.bi.dml.lops.MMTSJ.MMTSJType;
import com.ibm.bi.dml.lops.MapMultChain.ChainType;
import com.ibm.bi.dml.lops.PartialAggregate.CorrectionLocationType;
import com.ibm.bi.dml.lops.WeightedSquaredLoss.WeightsType;
import com.ibm.bi.dml.parser.DMLTranslator;
import com.ibm.bi.dml.runtime.DMLRuntimeException;
import com.ibm.bi.dml.runtime.DMLUnsupportedOperationException;
import com.ibm.bi.dml.runtime.functionobjects.And;
import com.ibm.bi.dml.runtime.functionobjects.Builtin;
import com.ibm.bi.dml.runtime.functionobjects.CM;
import com.ibm.bi.dml.runtime.functionobjects.CTable;
import com.ibm.bi.dml.runtime.functionobjects.DiagIndex;
import com.ibm.bi.dml.runtime.functionobjects.KahanPlus;
import com.ibm.bi.dml.runtime.functionobjects.Multiply;
import com.ibm.bi.dml.runtime.functionobjects.Plus;
import com.ibm.bi.dml.runtime.functionobjects.ReduceAll;
import com.ibm.bi.dml.runtime.functionobjects.SortIndex;
import com.ibm.bi.dml.runtime.functionobjects.SwapIndex;
import com.ibm.bi.dml.runtime.instructions.cp.CM_COV_Object;
import com.ibm.bi.dml.runtime.instructions.cp.DoubleObject;
import com.ibm.bi.dml.runtime.instructions.cp.KahanObject;
import com.ibm.bi.dml.runtime.instructions.cp.ScalarObject;
import com.ibm.bi.dml.runtime.instructions.mr.RangeBasedReIndexInstruction.IndexRange;
import com.ibm.bi.dml.runtime.matrix.data.LibMatrixBincell.BinaryAccessType;
import com.ibm.bi.dml.runtime.matrix.mapred.IndexedMatrixValue;
import com.ibm.bi.dml.runtime.matrix.mapred.MRJobConfiguration;
import com.ibm.bi.dml.runtime.matrix.operators.AggregateBinaryOperator;
import com.ibm.bi.dml.runtime.matrix.operators.AggregateOperator;
import com.ibm.bi.dml.runtime.matrix.operators.AggregateUnaryOperator;
import com.ibm.bi.dml.runtime.matrix.operators.BinaryOperator;
import com.ibm.bi.dml.runtime.matrix.operators.CMOperator;
import com.ibm.bi.dml.runtime.matrix.operators.COVOperator;
import com.ibm.bi.dml.runtime.matrix.operators.Operator;
import com.ibm.bi.dml.runtime.matrix.operators.ReorgOperator;
import com.ibm.bi.dml.runtime.matrix.operators.ScalarOperator;
import com.ibm.bi.dml.runtime.matrix.operators.UnaryOperator;
import com.ibm.bi.dml.runtime.util.FastBufferedDataInputStream;
import com.ibm.bi.dml.runtime.util.UtilFunctions;
public class MatrixBlock extends MatrixValue implements Serializable
{
@SuppressWarnings("unused")
private static final String _COPYRIGHT = "Licensed Materials - Property of IBM\n(C) Copyright IBM Corp. 2010, 2015\n" +
"US Government Users Restricted Rights - Use, duplication disclosure restricted by GSA ADP Schedule Contract with IBM Corp.";
private static final long serialVersionUID = 7319972089143154056L;
//sparsity nnz threshold, based on practical experiments on space consumption and performance
public static final double SPARSITY_TURN_POINT = 0.4;
//sparsity threshold for ultra-sparse matrix operations (40nnz in a 1kx1k block)
public static final double ULTRA_SPARSITY_TURN_POINT = 0.00004;
//basic header (int rlen, int clen, byte type)
public static final int HEADER_SIZE = 9;
public enum BlockType{
EMPTY_BLOCK,
ULTRA_SPARSE_BLOCK, //ultra sparse representation, in-mem same as sparse
SPARSE_BLOCK, //sparse representation, see sparseRows
DENSE_BLOCK, //dense representation, see denseBlock
}
//matrix meta data
protected int rlen = -1;
protected int clen = -1;
protected boolean sparse = true;
protected long nonZeros = 0;
//matrix data (sparse or dense)
protected double[] denseBlock = null;
protected SparseRow[] sparseRows = null;
//sparse-block-specific attributes (allocation only)
protected int estimatedNNzsPerRow = -1;
//ctable-specific attributes
protected int maxrow = -1;
protected int maxcolumn = -1;
//grpaggregate-specific attributes (optional)
protected int numGroups = -1;
//diag-specific attributes (optional)
protected boolean diag = false;
////////
// Matrix Constructors
//
/** Creates an empty 0x0 block in sparse representation. */
public MatrixBlock()
{
	this(0, 0, true);
}
/**
 * Creates an rl x cl zero block.
 * @param rl number of rows
 * @param cl number of columns
 * @param sp true for sparse representation
 */
public MatrixBlock(int rl, int cl, boolean sp)
{
	rlen = rl;
	clen = cl;
	sparse = sp;
	nonZeros = 0;
	maxrow = 0;
	maxcolumn = 0;
}
/**
 * Creates an rl x cl zero block and derives the per-row nnz estimate
 * (used to size new sparse rows) from the given total nnz estimate.
 */
public MatrixBlock(int rl, int cl, boolean sp, long estnnzs)
{
	this(rl, cl, sp);
	estimatedNNzsPerRow=(int)Math.ceil((double)estnnzs/(double)rl);
}
/** Copy constructor: deep-copies data and meta data of the given block. */
public MatrixBlock(MatrixBlock that)
{
	this.copy(that);
}
////////
// Initialization methods
// (reset, init, allocate, etc)
/** Resets all cells to zero, keeping current dimensions and representation. */
public void reset()
{
	//negative estimate yields estimatedNNzsPerRow < 0 (i.e., default row capacity)
	reset(-rlen);
}
/**
 * Resets all cells to zero and updates the per-row nnz estimate from the
 * given total estimate; existing arrays are reused when large enough.
 */
public void reset(long estnnzs)
{
	estimatedNNzsPerRow=(int)Math.ceil((double)estnnzs/(double)rlen);
	if(sparse)
	{
		resetSparse();
	}
	else
	{
		if(denseBlock!=null)
		{
			if(denseBlock.length<rlen*clen)
				denseBlock=null; //too small: drop for lazy reallocation
			else
				Arrays.fill(denseBlock, 0, rlen*clen, 0);
		}
	}
	nonZeros=0;
	//operation-specific attributes
	maxrow = rlen;
	maxcolumn = clen;
	numGroups = -1;
}
/** Resets to an rl x cl zero block, keeping the current representation. */
public void reset(int rl, int cl) {
	rlen=rl;
	clen=cl;
	nonZeros=0;
	reset();
}
/** Resets to an rl x cl zero block with a total nnz estimate. */
public void reset(int rl, int cl, long estnnzs) {
	rlen=rl;
	clen=cl;
	nonZeros=0;
	reset(estnnzs);
}
/** Resets to an rl x cl zero block with the given representation flag. */
public void reset(int rl, int cl, boolean sp)
{
	sparse=sp;
	reset(rl, cl);
}
/** Resets to an rl x cl zero block with representation flag and nnz estimate. */
public void reset(int rl, int cl, boolean sp, long estnnzs)
{
	sparse=sp;
	reset(rl, cl, estnnzs);
}
/** Clears all existing sparse rows in place (row objects are kept and reused). */
public void resetSparse()
{
	if(sparseRows!=null)
	{
		for(int i=0; i<Math.min(rlen, sparseRows.length); i++)
			if(sparseRows[i]!=null)
				sparseRows[i].reset(estimatedNNzsPerRow, clen);
	}
}
/**
 * Resets this block to a dense rl x cl block with all cells set to v;
 * v==0 degenerates to a plain reset.
 * @throws DMLRuntimeException if rl*cl exceeds the max dense array size
 */
public void resetDenseWithValue(int rl, int cl, double v)
	throws DMLRuntimeException
{
	estimatedNNzsPerRow=-1;
	rlen=rl;
	clen=cl;
	sparse=false;
	if(v==0)
	{
		reset();
		return;
	}
	//allocate dense block
	allocateDenseBlock();
	//init with constant value (non-zero, see above)
	int limit = rlen * clen;
	Arrays.fill(denseBlock, 0, limit, v);
	nonZeros=limit;
}
/**
 * Initializes this block from a 2D array, copying r x c values row-wise
 * and recomputing nnz. NOTE: designed only for dense representation.
 *
 * @param arr source values; arr[i] is copied into row i
 * @param r number of rows to copy
 * @param c number of columns to copy
 * @throws DMLRuntimeException if this block is sparse or r*c exceeds rlen*clen
 */
public void init(double[][] arr, int r, int c)
	throws DMLRuntimeException
{
	//input checks
	if ( sparse )
		throw new DMLRuntimeException("MatrixBlockDSM.init() can be invoked only on matrices with dense representation.");
	if( r*c > rlen*clen )
		throw new DMLRuntimeException("MatrixBlockDSM.init() invoked with too large dimensions ("+r+","+c+") vs ("+rlen+","+clen+")");
	//allocate or resize dense block
	allocateDenseBlock();
	//copy and compute nnz
	for(int i=0, ix=0; i < r; i++, ix+=clen)
		System.arraycopy(arr[i], 0, denseBlock, ix, arr[i].length);
	recomputeNonZeros();
	maxrow = r;
	maxcolumn = c;
}
/**
 * Initializes this block from a linearized (row-major) array and recomputes
 * nnz. NOTE: designed only for dense representation.
 *
 * @param arr source values in row-major order; arr.length values are copied
 * @param r logical number of rows
 * @param c logical number of columns
 * @throws DMLRuntimeException if this block is sparse or r*c exceeds rlen*clen
 */
public void init(double[] arr, int r, int c)
	throws DMLRuntimeException
{
	//input checks
	if ( sparse )
		throw new DMLRuntimeException("MatrixBlockDSM.init() can be invoked only on matrices with dense representation.");
	if( r*c > rlen*clen )
		throw new DMLRuntimeException("MatrixBlockDSM.init() invoked with too large dimensions ("+r+","+c+") vs ("+rlen+","+clen+")");
	//allocate or resize dense block
	allocateDenseBlock();
	//copy and compute nnz
	System.arraycopy(arr, 0, denseBlock, 0, arr.length);
	recomputeNonZeros();
	maxrow = r;
	maxcolumn = c;
}
/**
 * Initializes the top-left r x c region of this dense block with the
 * constant val; nnz is set to r*c for non-zero val.
 *
 * @param val constant cell value
 * @param r number of rows to fill
 * @param c number of columns to fill
 * @throws DMLRuntimeException if this block is sparse or r*c exceeds rlen*clen
 */
public void init(double val, int r, int c)
	throws DMLRuntimeException
{
	//input checks
	if ( sparse )
		throw new DMLRuntimeException("MatrixBlockDSM.init() can be invoked only on matrices with dense representation.");
	if( r*c > rlen*clen )
		throw new DMLRuntimeException("MatrixBlockDSM.init() invoked with too large dimensions ("+r+","+c+") vs ("+rlen+","+clen+")");
	if( val != 0 ) {
		//allocate or resize dense block
		allocateDenseBlock();
		if( r*c == rlen*clen ) { //FULL MATRIX INIT
			//memset value
			Arrays.fill(denseBlock, val);
		}
		else { //PARTIAL MATRIX INIT
			//rowwise memset value
			for(int i=0, ix=0; i < r; i++, ix+=clen)
				Arrays.fill(denseBlock, ix, ix+c, val);
		}
		//set non zeros to input dims
		nonZeros = r*c;
	}
	maxrow = r;
	maxcolumn = c;
}
/**
 * Indicates if the data array backing the current representation
 * (sparse rows or dense block) has been allocated.
 */
public boolean isAllocated()
{
	return sparse ? (sparseRows != null) : (denseBlock != null);
}
/**
 * Allocates the dense block (if necessary) and resets nnz to 0.
 * @throws DMLRuntimeException if rlen*clen exceeds the max java array size
 */
public void allocateDenseBlock()
	throws DMLRuntimeException
{
	allocateDenseBlock( true );
}
/**
 * Allocates the dense array if non-existing or too small (a fresh java
 * array is guaranteed 0-initialized); larger existing arrays are reused.
 *
 * @param clearNNZ if true, resets the nnz count to 0
 * @throws DMLRuntimeException if rlen*clen exceeds the max java array size
 */
public void allocateDenseBlock(boolean clearNNZ)
	throws DMLRuntimeException
{
	long limit = (long)rlen * clen;
	//check max size constraint (16GB dense), since java arrays are limited to 2^(32-1) elements)
	if( limit > Integer.MAX_VALUE ) {
		throw new DMLRuntimeException("Dense in-memory matrix block ("+rlen+"x"+clen+") exceeds supported size of "+Integer.MAX_VALUE+" elements (16GB). " +
				                      "Please, reduce the JVM heapsize to execute this in MR.");
	}
	//allocate block if non-existing or too small (guaranteed to be 0-initialized),
	if(denseBlock == null || denseBlock.length < limit ) {
		denseBlock = new double[(int)limit];
	}
	//clear nnz if necessary
	if( clearNNZ ) {
		nonZeros = 0;
	}
}
/**
 * Allocates the sparse rows array (if necessary) and resets nnz to 0.
 */
public void allocateSparseRowsBlock()
{
	allocateSparseRowsBlock(true);
}
/**
 * Allocates the sparse rows array if non-existing; a too-small existing
 * array is grown while preserving the already-allocated row objects.
 *
 * @param clearNNZ if true, resets the nnz count to 0
 */
public void allocateSparseRowsBlock(boolean clearNNZ)
{
	//allocate block if non-existing or too small (guaranteed to be 0-initialized),
	if( sparseRows == null ) {
		sparseRows=new SparseRow[rlen];
	}
	else if( sparseRows.length < rlen ) {
		SparseRow[] oldSparseRows=sparseRows;
		sparseRows = new SparseRow[rlen];
		for(int i=0; i<Math.min(oldSparseRows.length, rlen); i++) {
			sparseRows[i]=oldSparseRows[i];
		}
	}
	//clear nnz if necessary
	if( clearNNZ ) {
		nonZeros = 0;
	}
}
/**
 * Switches to dense rl x cl and allocates the dense array.
 * This should be called only in the read and write functions for CP,
 * and before calling any setValueDenseUnsafe().
 *
 * @param rl number of rows
 * @param cl number of columns
 * @throws DMLRuntimeException if rl*cl exceeds the max dense array size
 */
public void allocateDenseBlockUnsafe(int rl, int cl)
	throws DMLRuntimeException
{
	sparse=false;
	rlen=rl;
	clen=cl;
	//allocate dense block
	allocateDenseBlock();
}
/**
 * Allows to cleanup all previously allocated sparse rows or dense blocks.
 * This is for example required when reading a matrix with many empty blocks
 * via distributed cache into an in-memory list of blocks - not cleaning
 * blocks would significantly increase the total memory consumption.
 *
 * @param dense if true, drops the dense array
 * @param sparse if true, drops the sparse rows array
 */
public void cleanupBlock( boolean dense, boolean sparse )
{
	if(dense)
		denseBlock = null;
	if(sparse)
		sparseRows = null;
}
////////
// Metadata information
/** Returns the number of rows of this block. */
public int getNumRows()
{
	return rlen;
}
/**
 * Sets the number of rows without touching the data arrays.
 * NOTE: setNumRows() and setNumColumns() are used only in tertiaryInstruction
 * (for contingency tables) and pmm for meta data corrections.
 *
 * @param r new number of rows
 */
public void setNumRows(int r)
{
	rlen = r;
}
/** Returns the number of columns of this block. */
public int getNumColumns()
{
	return clen;
}
/** Sets the number of columns without touching the data arrays (see setNumRows). */
public void setNumColumns(int c)
{
	clen = c;
}
/** Returns the maintained number of non-zero values (may be stale until recomputeNonZeros). */
public long getNonZeros()
{
	return nonZeros;
}
/** Indicates if this block is a row or column vector. */
public boolean isVector()
{
	return (rlen == 1 || clen == 1);
}
/**
 * Returns the maximum row encountered WITHIN the current block;
 * dense blocks report the full number of rows.
 */
public int getMaxRow()
{
	return sparse ? maxrow : getNumRows();
}
/** Sets the maximum row encountered within the current block. */
public void setMaxRow(int r)
{
	maxrow = r;
}
/**
 * Returns the maximum column encountered WITHIN the current block;
 * dense blocks report the full number of columns.
 */
public int getMaxColumn()
{
	return sparse ? maxcolumn : getNumColumns();
}
/** Sets the maximum column encountered within the current block. */
public void setMaxColumn(int c)
{
	maxcolumn = c;
}
/** Unsafe emptiness check: trusts the maintained nnz count without recomputation. */
@Override
public boolean isEmpty()
{
	return isEmptyBlock(false);
}
/** Safe emptiness check: recomputes nnz if the maintained count is 0. */
public boolean isEmptyBlock()
{
	return isEmptyBlock(true);
}
/**
 * Indicates if this block contains no non-zero values.
 *
 * @param safe if true, recomputes nnz when the maintained count is 0
 *             in order to prevent under-estimation
 */
public boolean isEmptyBlock(boolean safe)
{
	//unallocated data arrays imply emptiness
	boolean ret = sparse ? (sparseRows == null) : (denseBlock == null);
	if( nonZeros == 0 )
	{
		//prevent under-estimation
		if( safe )
			recomputeNonZeros();
		ret = (nonZeros == 0);
	}
	return ret;
}
/** Marks this block as result of a diag operation. */
public void setDiag()
{
	diag = true;
}
/** Indicates if this block is marked as result of a diag operation. */
public boolean isDiag()
{
	return diag;
}
////////
// Data handling
/** Returns the internal dense array, or null for sparse blocks. */
public double[] getDenseArray()
{
	return sparse ? null : denseBlock;
}
/** Returns the internal sparse rows array, or null for dense blocks. */
public SparseRow[] getSparseRows()
{
	return sparse ? sparseRows : null;
}
/**
 * Returns an iterator over all non-zero cells of this sparse block.
 * Must only be called on blocks in sparse representation.
 */
public SparseRowsIterator getSparseRowsIterator()
{
	//check for valid format, should have been checked from outside
	if( !sparse )
		throw new RuntimeException("getSparseCellInterator should not be called for dense format");
	return new SparseRowsIterator(rlen, sparseRows);
}
/**
 * Returns an iterator over the non-zero cells of rows
 * [rowStart, rowStart+rowNum). Sparse representation only.
 */
public SparseRowsIterator getSparseRowsIterator(int rowStart, int rowNum)
{
	//check for valid format, should have been checked from outside
	if( !sparse )
		throw new RuntimeException("getSparseCellInterator should not be called for dense format");
	return new SparseRowsIterator(rowStart, rowStart+rowNum, sparseRows);
}
/**
 * Appends all rlen*clen cell values (including zeros) to the given collection.
 * NOTE(review): the sparse zero-padding uses limit-ret.size(), which assumes
 * ret was empty on entry - verify against callers.
 */
@Override
public void getCellValues(Collection<Double> ret)
{
	int limit=rlen*clen;
	if(sparse)
	{
		if(sparseRows==null)
		{
			for(int i=0; i<limit; i++)
				ret.add(0.0);
		}else
		{
			for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
			{
				if(sparseRows[r]==null)
					continue;
				double[] container=sparseRows[r].getValueContainer();
				for(int j=0; j<sparseRows[r].size(); j++)
					ret.add(container[j]);
			}
			//pad remaining zero cells
			int zeros=limit-ret.size();
			for(int i=0; i<zeros; i++)
				ret.add(0.0);
		}
	}else
	{
		if(denseBlock==null)
		{
			for(int i=0; i<limit; i++)
				ret.add(0.0);
		}else
		{
			for(int i=0; i<limit; i++)
				ret.add(denseBlock[i]);
		}
	}
}
/**
 * Builds a value-&gt;count histogram over all rlen*clen cells (including zeros).
 * NOTE(review): in the sparse case, zeros is computed as limit-ret.size()
 * where ret.size() is the number of DISTINCT values, not the number of
 * non-zero cells - confirm this matches the callers' expectations.
 */
@Override
public void getCellValues(Map<Double, Integer> ret)
{
	int limit=rlen*clen;
	if(sparse)
	{
		if(sparseRows==null)
		{
			ret.put(0.0, limit);
		}else
		{
			for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
			{
				if(sparseRows[r]==null)
					continue;
				double[] container=sparseRows[r].getValueContainer();
				for(int j=0; j<sparseRows[r].size(); j++)
				{
					Double v=container[j];
					Integer old=ret.get(v);
					if(old!=null)
						ret.put(v, old+1);
					else
						ret.put(v, 1);
				}
			}
			int zeros=limit-ret.size();
			Integer old=ret.get(0.0);
			if(old!=null)
				ret.put(0.0, old+zeros);
			else
				ret.put(0.0, zeros);
		}
	}else
	{
		if(denseBlock==null)
		{
			ret.put(0.0, limit);
		}else
		{
			for(int i=0; i<limit; i++)
			{
				double v=denseBlock[i];
				Integer old=ret.get(v);
				if(old!=null)
					ret.put(v, old+1);
				else
					ret.put(v, 1);
			}
		}
	}
}
/**
 * Returns the value of cell (r,c), 0 for unallocated regions.
 * Fix: the original bounds check used r&gt;rlen/c&gt;clen, which let the
 * out-of-range indexes r==rlen and c==clen (and negatives) slip through;
 * valid indexes are 0 &lt;= r &lt; rlen and 0 &lt;= c &lt; clen.
 */
@Override
public double getValue(int r, int c)
{
	//validate cell index (0-based, exclusive upper bound)
	if(r<0 || r>=rlen || c<0 || c>=clen)
		throw new RuntimeException("indexes ("+r+","+c+") out of range ("+rlen+","+clen+")");
	if(sparse)
	{
		if(sparseRows==null || sparseRows.length<=r || sparseRows[r]==null)
			return 0;
		return sparseRows[r].get(c);
	}else
	{
		if(denseBlock==null)
			return 0;
		return denseBlock[r*clen+c];
	}
}
/**
 * Sets cell (r,c) to v, allocating the data array on demand and
 * maintaining nnz. Fix: the original bounds check used r&gt;rlen/c&gt;clen,
 * which let the out-of-range indexes r==rlen and c==clen (and negatives)
 * slip through; valid indexes are 0 &lt;= r &lt; rlen and 0 &lt;= c &lt; clen.
 */
@Override
public void setValue(int r, int c, double v)
{
	//validate cell index (0-based, exclusive upper bound)
	if(r<0 || r>=rlen || c<0 || c>=clen)
		throw new RuntimeException("indexes ("+r+","+c+") out of range ("+rlen+","+clen+")");
	if(sparse)
	{
		//early abort: setting 0 into an unallocated region is a no-op
		if( (sparseRows==null || sparseRows.length<=r || sparseRows[r]==null) && v==0.0)
			return;
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);
		if(sparseRows[r].set(c, v))
			nonZeros++;
	}else
	{
		if(denseBlock==null && v==0.0)
			return;
		//allocate and init dense block (w/o overwriting nnz)
		try {
			allocateDenseBlock(false);
		}
		catch(DMLRuntimeException e){
			throw new RuntimeException(e);
		}
		//maintain nnz for 0->x, x->0 transitions
		int index=r*clen+c;
		if(denseBlock[index]==0)
			nonZeros++;
		denseBlock[index]=v;
		if(v==0)
			nonZeros--;
	}
}
/** Sets the cell at the given index to v (delegates to setValue(int,int,double)). */
@Override
public void setValue(CellIndex index, double v)
{
	setValue(index.row, index.column, v);
}
@Override
/**
 * If (r,c) \in Block, add v to the existing cell;
 * if not, add a new cell with index (r,c).
 *
 * This function intentionally avoids the maintenance of NNZ for efficiency;
 * callers are responsible for recomputing nnz afterwards.
 */
public void addValue(int r, int c, double v) {
	if(sparse)
	{
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);
		double curV=sparseRows[r].get(c);
		curV+=v;
		sparseRows[r].set(c, curV);
	}
	else
	{
		//allocate and init dense block (w/o overwriting nnz)
		try {
			allocateDenseBlock(false);
		}
		catch(DMLRuntimeException e){
			throw new RuntimeException(e);
		}
		int index=r*clen+c;
		denseBlock[index]+=v;
	}
}
/**
 * Returns the value of cell (r,c) without index validation;
 * unallocated regions read as 0.
 */
public double quickGetValue(int r, int c)
{
	if( sparse ) {
		if( sparseRows == null || sparseRows.length <= r || sparseRows[r] == null )
			return 0;
		return sparseRows[r].get(c);
	}
	return (denseBlock == null) ? 0 : denseBlock[r*clen+c];
}
/**
 * Sets cell (r,c) to v without index validation; allocates the data
 * array on demand and maintains nnz.
 */
public void quickSetValue(int r, int c, double v)
{
	if(sparse)
	{
		//early abort: setting 0 into an unallocated region is a no-op
		if( (sparseRows==null || sparseRows.length<=r || sparseRows[r]==null) && v==0.0)
			return;
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);
		if(sparseRows[r].set(c, v))
			nonZeros++;
	}
	else
	{
		if(denseBlock==null && v==0.0)
			return;
		//allocate and init dense block (w/o overwriting nnz)
		try {
			allocateDenseBlock(false);
		}
		catch(DMLRuntimeException e){
			throw new RuntimeException(e);
		}
		//maintain nnz for 0->x, x->0 transitions
		int index=r*clen+c;
		if(denseBlock[index]==0)
			nonZeros++;
		denseBlock[index]=v;
		if(v==0)
			nonZeros--;
	}
}
/** Dense-only cell read without validation; returns 0 if the dense array is unallocated. */
public double getValueDenseUnsafe(int r, int c)
{
	if(denseBlock==null)
		return 0;
	return denseBlock[r*clen+c];
}
/**
 * This can be only called when you know you have properly allocated space for
 * a dense representation and r and c are in the range of the dimensions.
 * Note: this function won't keep track of the non-zeros.
 */
public void setValueDenseUnsafe(int r, int c, double v)
{
	denseBlock[r*clen+c]=v;
}
/** Sparse-only cell read without validation; unallocated rows read as 0. */
public double getValueSparseUnsafe(int r, int c)
{
	if(sparseRows==null || sparseRows.length<=r || sparseRows[r]==null)
		return 0;
	return sparseRows[r].get(c);
}
/**
 * Append value is only used when values are appended at the end of each row
 * for the sparse representation. This can only be called when the caller
 * knows the access pattern of the block (strictly increasing column index
 * per row); dense blocks fall back to quickSetValue.
 *
 * @param r row index
 * @param c column index (must be past the row's last appended column)
 * @param v value; zeros are ignored
 */
public void appendValue(int r, int c, double v)
{
	if(v==0) return;
	if(!sparse)
		quickSetValue(r, c, v);
	else
	{
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);
		sparseRows[r].append(c, v);
		nonZeros++;
	}
}
/**
 * Overwrites row r with the given sparse row; for dense blocks the values
 * are written cell by cell. Maintains nnz in the sparse case.
 * NOTE(review): the sparse branch adds values.size() to nonZeros without
 * subtracting the replaced row's old size - confirm callers only append
 * into empty rows.
 */
public void appendRow(int r, SparseRow values)
{
	if(values==null)
		return;
	if(sparse)
	{
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(values);
		else
			sparseRows[r].copy(values);
		nonZeros+=values.size();
	}else
	{
		int[] cols=values.getIndexContainer();
		double[] vals=values.getValueContainer();
		for(int i=0; i<values.size(); i++)
			quickSetValue(r, cols[i], vals[i]);
	}
}
/**
 * Appends all non-zeros of the given block into this sparse block at the
 * given row/column offsets (strictly appending, no overwrite); nnz is NOT
 * maintained here.
 *
 * @param that source block (sparse or dense); empty inputs are a no-op
 * @param rowoffset row offset of the target region
 * @param coloffset column offset of the target region
 */
public void appendToSparse( MatrixBlock that, int rowoffset, int coloffset )
{
	if( that==null || that.isEmptyBlock(false) )
		return; //nothing to append
	//init sparse rows if necessary
	allocateSparseRowsBlock(false);
	if( that.sparse ) //SPARSE <- SPARSE
	{
		for( int i=0; i<that.rlen; i++ )
		{
			SparseRow brow = that.sparseRows[i];
			if( brow!=null && !brow.isEmpty() )
			{
				int aix = rowoffset+i;
				int len = brow.size();
				int[] ix = brow.getIndexContainer();
				double[] val = brow.getValueContainer();
				if( sparseRows[aix]==null )
					sparseRows[aix] = new SparseRow(estimatedNNzsPerRow,clen);
				for( int j=0; j<len; j++ )
					sparseRows[aix].append(coloffset+ix[j], val[j]);
			}
		}
	}
	else //SPARSE <- DENSE
	{
		for( int i=0; i<that.rlen; i++ )
		{
			int aix = rowoffset+i;
			for( int j=0, bix=i*that.clen; j<that.clen; j++ )
			{
				double val = that.denseBlock[bix+j];
				if( val != 0 )
				{
					if( sparseRows[aix]==null )//create sparserow only if required
						sparseRows[aix] = new SparseRow(estimatedNNzsPerRow,clen);
					sparseRows[aix].append(coloffset+j, val);
				}
			}
		}
	}
}
/**
 * Sorts all allocated sparse rows by column index;
 * a no-op for dense or unallocated blocks.
 */
public void sortSparseRows()
{
	if( !sparse || sparseRows == null )
		return;
	for( int i=0; i<sparseRows.length; i++ ) {
		SparseRow row = sparseRows[i];
		//single-element rows are trivially sorted
		if( row != null && row.size() > 1 )
			row.sort();
	}
}
/**
 * Utility function for computing the minimum non-zero value.
 *
 * @return the minimum non-zero value, or -1 for an empty block
 * @throws DMLRuntimeException
 */
public double minNonZero()
	throws DMLRuntimeException
{
	//check for empty block and return immediately
	if( isEmptyBlock() )
		return -1;
	//NOTE: usually this method is only applied on dense vectors and hence not really tuned yet.
	double min = Double.MAX_VALUE;
	for( int i=0; i<rlen; i++ )
		for( int j=0; j<clen; j++ ){
			double val = quickGetValue(i, j);
			if( val != 0 )
				min = Math.min(min, val);
		}
	return min;
}
/**
 * Wrapper method for reduceall-min of a matrix.
 *
 * @return the minimum value over all cells
 * @throws DMLRuntimeException
 */
public double min()
	throws DMLRuntimeException
{
	//construct operator (Double.MAX_VALUE is the neutral element of min)
	AggregateOperator aop = new AggregateOperator(Double.MAX_VALUE, Builtin.getBuiltinFnObject("min"));
	AggregateUnaryOperator auop = new AggregateUnaryOperator( aop, ReduceAll.getReduceAllFnObject());
	//execute operation
	MatrixBlock out = new MatrixBlock(1, 1, false);
	LibMatrixAgg.aggregateUnaryMatrix(this, out, auop);
	return out.quickGetValue(0, 0);
}
/**
 * Wrapper method for reduceall-max of a matrix.
 *
 * @return the maximum value over all cells
 * @throws DMLRuntimeException
 */
public double max()
	throws DMLRuntimeException
{
	//construct operator (-Double.MAX_VALUE is the neutral element of max)
	AggregateOperator aop = new AggregateOperator(-Double.MAX_VALUE, Builtin.getBuiltinFnObject("max"));
	AggregateUnaryOperator auop = new AggregateUnaryOperator( aop, ReduceAll.getReduceAllFnObject());
	//execute operation
	MatrixBlock out = new MatrixBlock(1, 1, false);
	LibMatrixAgg.aggregateUnaryMatrix(this, out, auop);
	return out.quickGetValue(0, 0);
}
////////
// sparsity handling functions
/**
 * Returns the current representation (true for sparse).
 */
public boolean isInSparseFormat()
{
	return sparse;
}
/**
 * Indicates if this block qualifies as ultra-sparse: sparse representation,
 * sparsity below ULTRA_SPARSITY_TURN_POINT, and fewer than 40 non-zeros
 * (absolute cap, i.e., 40 nnz in a 1k x 1k block).
 */
public boolean isUltraSparse()
{
	double sp = ((double)nonZeros/rlen)/clen;
	//check for sparse representation in order to account for vectors in dense
	return sparse && sp<ULTRA_SPARSITY_TURN_POINT && nonZeros<40;
}
/**
 * Evaluates if this matrix block should be in sparse format in
 * memory. Note that this call does not change the representation -
 * for this please call examSparsity.
 *
 * @return true if the sparse representation is preferable in memory
 */
public boolean evalSparseFormatInMemory()
{
	long lrlen = (long) rlen;
	long lclen = (long) clen;
	long lnonZeros = (long) nonZeros;
	//ensure exact size estimates for write
	if( lnonZeros<=0 ) {
		recomputeNonZeros();
		lnonZeros = (long) nonZeros;
	}
	//decide on in-memory representation
	return evalSparseFormatInMemory(lrlen, lclen, lnonZeros);
}
/**
 * Evaluates the preferred in-memory format, optionally for the
 * transposed dimensions (rows/columns swapped).
 */
private boolean evalSparseFormatInMemory(boolean transpose)
{
	int lrlen = (transpose) ? clen : rlen;
	int lclen = (transpose) ? rlen : clen;
	long lnonZeros = (long) nonZeros;
	//ensure exact size estimates for write
	if( lnonZeros<=0 ) {
		recomputeNonZeros();
		lnonZeros = (long) nonZeros;
	}
	//decide on in-memory representation
	return evalSparseFormatInMemory(lrlen, lclen, lnonZeros);
}
/**
 * Evaluates if this matrix block should be in sparse format on
 * disk. This applies to any serialized matrix representation, i.e.,
 * when writing to in-memory buffer pool pages or writing to local fs
 * or hdfs.
 *
 * @return true if the sparse representation is preferable on disk
 */
public boolean evalSparseFormatOnDisk()
{
	long lrlen = (long) rlen;
	long lclen = (long) clen;
	//ensure exact size estimates for write
	if( nonZeros <= 0 ) {
		recomputeNonZeros();
	}
	//decide on in-memory representation
	return evalSparseFormatOnDisk(lrlen, lclen, nonZeros);
}
/**
 * Evaluates if this matrix block should be in sparse format in
 * memory. Depending on the current representation, the state of the
 * matrix block is changed to the right representation if necessary.
 * Note that this consumes for the time of execution memory for both
 * representations.
 *
 * @throws DMLRuntimeException
 */
public void examSparsity()
	throws DMLRuntimeException
{
	//determine target representation
	boolean sparseDst = evalSparseFormatInMemory();
	//check for empty blocks (e.g., sparse-sparse)
	if( isEmptyBlock(false) )
		cleanupBlock(true, true);
	//change representation if required (also done for
	//empty blocks in order to set representation flags)
	if( sparse && !sparseDst)
		sparseToDense();
	else if( !sparse && sparseDst )
		denseToSparse();
}
/**
 * Evaluates if a matrix block with the given characteristics should be in
 * sparse format in memory.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zeros
 * @return true if sparse is below the sparsity threshold AND its estimated
 *         in-memory size is smaller than the dense size
 */
public static boolean evalSparseFormatInMemory( final long nrows, final long ncols, final long nnz )
{
	//evaluate sparsity threshold
	double lsparsity = (double)nnz/nrows/ncols;
	boolean lsparse = (lsparsity < SPARSITY_TURN_POINT);
	//compare size of sparse and dense representation in order to prevent
	//that the sparse size exceed the dense size since we use the dense size
	//as worst-case estimate if unknown (and it requires less io from
	//main memory).
	double sizeSparse = estimateSizeSparseInMemory(nrows, ncols, lsparsity);
	double sizeDense = estimateSizeDenseInMemory(nrows, ncols);
	return lsparse && (sizeSparse<sizeDense);
}
/**
 * Evaluates if a matrix block with the given characteristics should be in
 * sparse format on disk (or in any other serialized representation).
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zeros
 * @return true if sparse is below the sparsity threshold AND either the
 *         sparse or ultra-sparse serialized size beats the dense size
 */
public static boolean evalSparseFormatOnDisk( final long nrows, final long ncols, final long nnz )
{
	//evaluate sparsity threshold
	double lsparsity = ((double)nnz/nrows)/ncols;
	boolean lsparse = (lsparsity < SPARSITY_TURN_POINT);
	double sizeUltraSparse = estimateSizeUltraSparseOnDisk( nrows, ncols, nnz );
	double sizeSparse = estimateSizeSparseOnDisk(nrows, ncols, nnz);
	double sizeDense = estimateSizeDenseOnDisk(nrows, ncols);
	return lsparse && (sizeSparse<sizeDense || sizeUltraSparse<sizeDense);
}
////////
// basic block handling functions
/**
 * Converts this block from dense to sparse representation in place,
 * rebuilding nnz; the dense array is released afterwards.
 */
private void denseToSparse()
{
	//set target representation
	sparse = true;
	//early abort on empty blocks
	if(denseBlock==null)
		return;
	//allocate sparse target block (reset required to maintain nnz again)
	allocateSparseRowsBlock();
	reset();
	//copy dense to sparse
	double[] a = denseBlock;
	SparseRow[] c = sparseRows;
	for( int i=0, aix=0; i<rlen; i++ )
		for(int j=0; j<clen; j++, aix++)
			if( a[aix] != 0 ) {
				if( c[i]==null ) //create sparse row only if required
					c[i]=new SparseRow(estimatedNNzsPerRow, clen);
				c[i].append(j, a[aix]);
				nonZeros++;
			}
	//cleanup dense block
	denseBlock = null;
}
/**
 * Converts this block from sparse to dense representation in place
 * (nnz is kept as-is); the sparse rows are released afterwards.
 *
 * @throws DMLRuntimeException if rlen*clen overflows int
 */
private void sparseToDense()
	throws DMLRuntimeException
{
	//set target representation
	sparse = false;
	//early abort on empty blocks
	if(sparseRows==null)
		return;
	int limit=rlen*clen;
	if ( limit < 0 ) { //int overflow guard
		throw new DMLRuntimeException("Unexpected error in sparseToDense().. limit < 0: " + rlen + ", " + clen + ", " + limit);
	}
	//allocate dense target block, but keep nnz (no need to maintain)
	allocateDenseBlock(false);
	Arrays.fill(denseBlock, 0, limit, 0);
	//copy sparse to dense
	SparseRow[] a = sparseRows;
	double[] c = denseBlock;
	for( int i=0, cix=0; i<rlen; i++, cix+=clen)
		if( a[i] != null && !a[i].isEmpty() ) {
			int alen = a[i].size();
			int[] aix = a[i].getIndexContainer();
			double[] avals = a[i].getValueContainer();
			for(int j=0; j<alen; j++)
				if( avals[j] != 0 )
					c[ cix+aix[j] ] = avals[j];
		}
	//cleanup sparse rows
	sparseRows = null;
}
/** Recomputes the nnz count from scratch over the entire block. */
public void recomputeNonZeros()
{
	nonZeros=0;
	if( sparse && sparseRows!=null )
	{
		int limit = Math.min(rlen, sparseRows.length);
		for(int i=0; i<limit; i++)
			if(sparseRows[i]!=null)
				nonZeros += sparseRows[i].size();
	}
	else if( !sparse && denseBlock!=null )
	{
		int limit=rlen*clen;
		for(int i=0; i<limit; i++)
		{
			//HotSpot JVM bug causes crash in presence of NaNs
			//nonZeros += (denseBlock[i]!=0) ? 1 : 0;
			if( denseBlock[i]!=0 )
				nonZeros++;
		}
	}
}
/**
 * Counts the non-zeros in the inclusive index range [rl,ru] x [cl,cu]
 * without modifying the maintained nnz.
 * NOTE(review): the general sparse case derives the count from two
 * searchIndexesFirstGTE lookups (aend-astart+1), which assumes column
 * index cu is present in the row - verify against SparseRow semantics.
 */
private long recomputeNonZeros(int rl, int ru, int cl, int cu)
{
	long nnz = 0;
	if(sparse)
	{
		if(sparseRows!=null)
		{
			int rlimit = Math.min( ru+1, Math.min(rlen, sparseRows.length) );
			if( cl==0 && cu==clen-1 ) //specific case: all cols
			{
				for(int i=rl; i<rlimit; i++)
					if(sparseRows[i]!=null && !sparseRows[i].isEmpty())
						nnz+=sparseRows[i].size();
			}
			else if( cl==cu ) //specific case: one column
			{
				for(int i=rl; i<rlimit; i++)
					if(sparseRows[i]!=null && !sparseRows[i].isEmpty())
						nnz += (sparseRows[i].get(cl)!=0) ? 1 : 0;
			}
			else //general case
			{
				int astart,aend;
				for(int i=rl; i<rlimit; i++)
					if(sparseRows[i]!=null && !sparseRows[i].isEmpty())
					{
						SparseRow arow = sparseRows[i];
						astart = arow.searchIndexesFirstGTE(cl);
						aend = arow.searchIndexesFirstGTE(cu);
						nnz += (astart!=-1) ? (aend-astart+1) : 0;
					}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			for( int i=rl, ix=rl*clen; i<=ru; i++, ix+=clen )
				for( int j=cl; j<=cu; j++ )
				{
					//HotSpot JVM bug causes crash in presence of NaNs
					//nnz += (denseBlock[ix+j]!=0) ? 1 : 0;
					if( denseBlock[ix+j]!=0 )
						nnz++;
				}
		}
	}
	return nnz;
}
/**
 * Deep-copies the given matrix into this block, choosing the target
 * representation via evalSparseFormatInMemory of the source.
 */
public void copy(MatrixValue thatValue)
{
	MatrixBlock that;
	try {
		that = checkType(thatValue);
	} catch (DMLUnsupportedOperationException e) {
		throw new RuntimeException(e);
	}
	if( this == that ) //prevent data loss (e.g., on sparse-dense conversion)
		throw new RuntimeException( "Copy must not overwrite itself!" );
	this.rlen = that.rlen;
	this.clen = that.clen;
	this.sparse = that.evalSparseFormatInMemory();
	estimatedNNzsPerRow=(int)Math.ceil((double)thatValue.getNonZeros()/(double)rlen);
	//dispatch on source/target representation
	if(this.sparse && that.sparse)
		copySparseToSparse(that);
	else if(this.sparse && !that.sparse)
		copyDenseToSparse(that);
	else if(!this.sparse && that.sparse)
		copySparseToDense(that);
	else
		copyDenseToDense(that);
}
/**
 * Deep-copies the given matrix into this block with an explicitly
 * requested target representation.
 *
 * @param sp true to store the copy in sparse representation
 */
public void copy(MatrixValue thatValue, boolean sp) {
	MatrixBlock that;
	try {
		that = checkType(thatValue);
	} catch (DMLUnsupportedOperationException e) {
		throw new RuntimeException(e);
	}
	if( this == that ) //prevent data loss (e.g., on sparse-dense conversion)
		throw new RuntimeException( "Copy must not overwrite itself!" );
	this.rlen=that.rlen;
	this.clen=that.clen;
	this.sparse=sp;
	estimatedNNzsPerRow=(int)Math.ceil((double)thatValue.getNonZeros()/(double)rlen);
	//dispatch on source/target representation
	if(this.sparse && that.sparse)
		copySparseToSparse(that);
	else if(this.sparse && !that.sparse)
		copyDenseToSparse(that);
	else if(!this.sparse && that.sparse)
		copySparseToDense(that);
	else
		copyDenseToDense(that);
}
/** Full-block copy: sparse source into sparse target (reuses row objects where possible). */
private void copySparseToSparse(MatrixBlock that)
{
	this.nonZeros=that.nonZeros;
	if( that.isEmptyBlock(false) )
	{
		resetSparse();
		return;
	}
	allocateSparseRowsBlock(false);
	for(int i=0; i<Math.min(that.sparseRows.length, rlen); i++)
	{
		if(that.sparseRows[i]!=null)
		{
			if(sparseRows[i]==null)
				sparseRows[i]=new SparseRow(that.sparseRows[i]);
			else
				sparseRows[i].copy(that.sparseRows[i]);
		}else if(this.sparseRows[i]!=null)
			this.sparseRows[i].reset(estimatedNNzsPerRow, clen); //clear stale row
	}
}
/** Full-block copy: dense source into dense target via arraycopy. */
private void copyDenseToDense(MatrixBlock that)
{
	nonZeros = that.nonZeros;
	int limit = rlen*clen;
	//plain reset to 0 for empty input
	if( that.isEmptyBlock(false) )
	{
		if(denseBlock!=null)
			Arrays.fill(denseBlock, 0, limit, 0);
		return;
	}
	//allocate and init dense block (w/o overwriting nnz)
	try {
		allocateDenseBlock(false);
	}
	catch(DMLRuntimeException e){
		throw new RuntimeException(e);
	}
	//actual copy
	System.arraycopy(that.denseBlock, 0, denseBlock, 0, limit);
}
/** Full-block copy: sparse source scattered into the dense target. */
private void copySparseToDense(MatrixBlock that)
{
	this.nonZeros=that.nonZeros;
	if( that.isEmptyBlock(false) )
	{
		if(denseBlock!=null)
			Arrays.fill(denseBlock, 0);
		return;
	}
	//allocate and init dense block (w/o overwriting nnz)
	try {
		allocateDenseBlock(false);
	}
	catch(DMLRuntimeException e){
		throw new RuntimeException(e);
	}
	int start=0;
	for(int r=0; r<Math.min(that.sparseRows.length, rlen); r++, start+=clen)
	{
		if(that.sparseRows[r]==null)
			continue;
		double[] values=that.sparseRows[r].getValueContainer();
		int[] cols=that.sparseRows[r].getIndexContainer();
		for(int i=0; i<that.sparseRows[r].size(); i++)
		{
			denseBlock[start+cols[i]]=values[i];
		}
	}
}
/** Full-block copy: dense source gathered into the sparse target (rows created on demand). */
private void copyDenseToSparse(MatrixBlock that)
{
	nonZeros = that.nonZeros;
	if( that.isEmptyBlock(false) )
	{
		resetSparse();
		return;
	}
	allocateSparseRowsBlock(false);
	for(int i=0, ix=0; i<rlen; i++)
	{
		if( sparseRows[i]!=null )
			sparseRows[i].reset(estimatedNNzsPerRow, clen); //clear stale row
		for(int j=0; j<clen; j++)
		{
			double val = that.denseBlock[ix++];
			if( val != 0 )
			{
				if(sparseRows[i]==null) //create sparse row only if required
					sparseRows[i]=new SparseRow(estimatedNNzsPerRow, clen);
				sparseRows[i].append(j, val);
			}
		}
	}
}
/**
 * In-place copy of matrix src into the index range of the existing current matrix.
 * Note that removal of existing nnz in the index range and nnz maintenance is
 * only done if 'awareDestNZ=true'.
 *
 * @param rl upper row of the target range (inclusive)
 * @param ru lower row of the target range (inclusive)
 * @param cl left column of the target range (inclusive)
 * @param cu right column of the target range (inclusive)
 * @param src source block whose values are copied into the range
 * @param awareDestNZ
 *           true, forces (1) to remove existing non-zeros in the index range of the
 *                 destination if not present in src and (2) to internally maintain nnz
 *           false, assume empty index range in destination and do not maintain nnz
 *                  (the invoker is responsible to recompute nnz after all copies are done)
 * @throws DMLRuntimeException
 */
public void copy(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ )
	throws DMLRuntimeException
{
	//dispatch on source/target representation
	if(sparse && src.sparse)
		copySparseToSparse(rl, ru, cl, cu, src, awareDestNZ);
	else if(sparse && !src.sparse)
		copyDenseToSparse(rl, ru, cl, cu, src, awareDestNZ);
	else if(!sparse && src.sparse)
		copySparseToDense(rl, ru, cl, cu, src, awareDestNZ);
	else
		copyDenseToDense(rl, ru, cl, cu, src, awareDestNZ);
}
/**
 * Ranged copy: sparse source into sparse target region [rl,ru] x [cl,cu];
 * clears overlapping target nnz and maintains the nnz count only if
 * awareDestNZ is true.
 */
private void copySparseToSparse(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
{
	//handle empty src and dest
	if( src.isEmptyBlock(false) )
	{
		if( awareDestNZ && sparseRows != null )
			copyEmptyToSparse(rl, ru, cl, cu, true);
		return;
	}
	if(sparseRows==null)
		sparseRows=new SparseRow[rlen];
	else if( awareDestNZ )
	{
		copyEmptyToSparse(rl, ru, cl, cu, true);
		//explicit clear if awareDestNZ because more efficient since
		//src will have multiple columns and only few overwriting values
	}
	//copy values
	int alen;
	int[] aix;
	double[] avals;
	for( int i=0; i<src.rlen; i++ )
	{
		SparseRow arow = src.sparseRows[i];
		if( arow != null && !arow.isEmpty() )
		{
			alen = arow.size();
			aix = arow.getIndexContainer();
			avals = arow.getValueContainer();
			if( sparseRows[rl+i] == null || sparseRows[rl+i].isEmpty()  )
			{
				//fast path: append into a fresh row
				sparseRows[rl+i] = new SparseRow(estimatedNNzsPerRow, clen);
				SparseRow brow = sparseRows[rl+i];
				for( int j=0; j<alen; j++ )
					brow.append(cl+aix[j], avals[j]);
				if( awareDestNZ )
					nonZeros += brow.size();
			}
			else if( awareDestNZ ) //general case (w/ awareness NNZ)
			{
				SparseRow brow = sparseRows[rl+i];
				int lnnz = brow.size();
				if( cl==cu && cl==aix[0] )
				{
					//single-cell range: set or delete in place
					if (avals[0]==0)
						brow.deleteIndex(cl);
					else
						brow.set(cl, avals[0] );
				}
				else
				{
					brow.deleteIndexRange(cl, cu);
					for( int j=0; j<alen; j++ )
						brow.set(cl+aix[j], avals[j]);
				}
				nonZeros += (brow.size() - lnnz);
			}
			else //general case (w/o awareness NNZ)
			{
				SparseRow brow = sparseRows[rl+i];
				//brow.set(cl, arow);
				for( int j=0; j<alen; j++ )
					brow.set(cl+aix[j], avals[j]);
			}
		}
	}
}
/**
 * Ranged copy: sparse source scattered into dense target region
 * [rl,ru] x [cl,cu]; clears the region and maintains nnz only if
 * awareDestNZ is true.
 */
private void copySparseToDense(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
	throws DMLRuntimeException
{
	//handle empty src and dest
	if( src.isEmptyBlock(false) )
	{
		if( awareDestNZ && denseBlock != null ) {
			nonZeros -= recomputeNonZeros(rl, ru, cl, cu);
			copyEmptyToDense(rl, ru, cl, cu);
		}
		return;
	}
	if(denseBlock==null)
		allocateDenseBlock();
	else if( awareDestNZ )
	{
		nonZeros -= recomputeNonZeros(rl, ru, cl, cu);
		copyEmptyToDense(rl, ru, cl, cu);
	}
	//copy values
	int alen;
	int[] aix;
	double[] avals;
	for( int i=0, ix=rl*clen; i<src.rlen; i++, ix+=clen )
	{
		SparseRow arow = src.sparseRows[i];
		if( arow != null && !arow.isEmpty() )
		{
			alen = arow.size();
			aix = arow.getIndexContainer();
			avals = arow.getValueContainer();
			for( int j=0; j<alen; j++ )
				denseBlock[ix+cl+aix[j]] = avals[j];
			if(awareDestNZ)
				nonZeros += alen;
		}
	}
}
/**
 * Ranged copy: dense source gathered into sparse target region
 * [rl,ru] x [cl,cu]; maintains nnz only if awareDestNZ is true
 * (no explicit clear needed since overlapping cells are overwritten).
 */
private void copyDenseToSparse(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
{
	//handle empty src and dest
	if( src.isEmptyBlock(false) )
	{
		if( awareDestNZ && sparseRows != null )
			copyEmptyToSparse(rl, ru, cl, cu, true);
		return;
	}
	if(sparseRows==null)
		sparseRows=new SparseRow[rlen];
	//no need to clear for awareDestNZ since overwritten
	//copy values
	double val;
	for( int i=0, ix=0; i<src.rlen; i++, ix+=src.clen )
	{
		int rix = rl + i;
		if( sparseRows[rix]==null || sparseRows[rix].isEmpty() )
		{
			//fast path: append non-zeros into a fresh row
			for( int j=0; j<src.clen; j++ )
				if( (val = src.denseBlock[ix+j]) != 0 )
				{
					if( sparseRows[rix]==null )
						sparseRows[rix] = new SparseRow(estimatedNNzsPerRow, clen);
					sparseRows[rix].append(cl+j, val);
				}
			if( awareDestNZ && sparseRows[rix]!=null )
				nonZeros += sparseRows[rix].size();
		}
		else if( awareDestNZ ) //general case (w/ awareness NNZ)
		{
			SparseRow brow = sparseRows[rix];
			int lnnz = brow.size();
			if( cl==cu )
			{
				//single-cell range: set or delete in place
				if ((val = src.denseBlock[ix])==0)
					brow.deleteIndex(cl);
				else
					brow.set(cl, val);
			}
			else
			{
				brow.deleteIndexRange(cl, cu);
				for( int j=0; j<src.clen; j++ )
					if( (val = src.denseBlock[ix+j]) != 0 )
						brow.set(cl+j, val);
			}
			nonZeros += (brow.size() - lnnz);
		}
		else //general case (w/o awareness NNZ)
		{
			SparseRow brow = sparseRows[rix];
			for( int j=0; j<src.clen; j++ )
				if( (val = src.denseBlock[ix+j]) != 0 )
					brow.set(cl+j, val);
		}
	}
}
/**
 * Copies the dense source block into the target region [rl,ru] x [cl,cu] of
 * this block's dense representation via {@code System.arraycopy}.
 *
 * @param rl target row lower index (inclusive)
 * @param ru target row upper index (inclusive)
 * @param cl target column lower index (inclusive)
 * @param cu target column upper index (inclusive)
 * @param src dense source block
 * @param awareDestNZ if true, exactly maintain this block's nonZeros count
 * @throws DMLRuntimeException if dense block allocation fails
 */
private void copyDenseToDense(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
    throws DMLRuntimeException
{
    //handle empty src and dest
    if( src.isEmptyBlock(false) )
    {
        if( awareDestNZ && denseBlock != null ) {
            nonZeros -= recomputeNonZeros(rl, ru, cl, cu);
            copyEmptyToDense(rl, ru, cl, cu);
        }
        return;
    }
    if(denseBlock==null)
        allocateDenseBlock();
    //no need to clear for awareDestNZ since overwritten

    //nnz delta = source nnz - old region nnz (region fully overwritten)
    if( awareDestNZ )
        nonZeros = nonZeros - recomputeNonZeros(rl, ru, cl, cu) + src.nonZeros;

    //copy values
    int rowLen = cu-cl+1;
    if(clen == src.clen) //optimization for equal width: one contiguous copy
        System.arraycopy(src.denseBlock, 0, denseBlock, rl*clen+cl, src.rlen*src.clen);
    else //row-wise copy with different strides in src and dest
        for( int i=0, ix1=0, ix2=rl*clen+cl; i<src.rlen; i++, ix1+=src.clen, ix2+=clen )
            System.arraycopy(src.denseBlock, ix1, denseBlock, ix2, rowLen);
}
/**
 * Deletes all values in the column range [cl,cu] of rows [rl,ru] from the
 * sparse representation, i.e., overwrites the region with an empty block.
 *
 * @param rl row lower index (inclusive)
 * @param ru row upper index (inclusive)
 * @param cl column lower index (inclusive)
 * @param cu column upper index (inclusive)
 * @param updateNNZ if true, exactly maintain this block's nonZeros count
 */
private void copyEmptyToSparse(int rl, int ru, int cl, int cu, boolean updateNNZ )
{
    //single-column regions use the cheaper single-index delete
    final boolean singleCol = (cl == cu);

    for( int i = rl; i <= ru; i++ )
    {
        SparseRow row = sparseRows[i];
        if( row == null || row.isEmpty() )
            continue; //nothing to delete in this row

        int oldSize = row.size();
        if( singleCol )
            row.deleteIndex(cl);
        else
            row.deleteIndexRange(cl, cu);

        //nnz delta = new row size - old row size (non-positive)
        if( updateNNZ )
            nonZeros += (row.size() - oldSize);
    }
}
/**
 * Overwrites the dense target region [rl,ru] x [cl,cu] with zeros. Does not
 * maintain nonZeros; callers adjust the count themselves.
 *
 * @param rl row lower index (inclusive)
 * @param ru row upper index (inclusive)
 * @param cl column lower index (inclusive)
 * @param cu column upper index (inclusive)
 */
private void copyEmptyToDense(int rl, int ru, int cl, int cu)
{
    int rowLen = cu-cl+1;

    if(clen == rowLen) //optimization for equal width: one contiguous fill
        Arrays.fill(denseBlock, rl*clen+cl, ru*clen+cu+1, 0);
    else //row-wise fill with stride clen
        for( int i=rl, ix2=rl*clen+cl; i<=ru; i++, ix2+=clen )
            Arrays.fill(denseBlock, ix2, ix2+rowLen, 0);
}
/**
 * Merge disjoint: merges all non-zero values of the given input into the current
 * matrix block. Note that this method does NOT check for overlapping entries;
 * it's the caller's responsibility to ensure disjoint matrix blocks.
 *
 * The appendOnly parameter is only relevant for sparse target blocks; if true,
 * we only append values and do not sort sparse rows for each call; this is useful
 * whenever we merge iterators of matrix blocks into one target block.
 *
 * @param that source block whose non-zeros are merged into this block
 * @param appendOnly if true, skip per-call sorting of sparse rows
 * @throws DMLRuntimeException on dimension mismatch or if the combined nnz
 *         exceeds the block capacity (best-effort disjointness check)
 */
public void merge(MatrixBlock that, boolean appendOnly)
    throws DMLRuntimeException
{
    //check for empty input source (nothing to merge)
    if( that == null || that.isEmptyBlock(false) )
        return;

    //check dimensions (before potentially copy to prevent implicit dimension change)
    //this also does a best effort check for disjoint input blocks via the number of non-zeros
    if( rlen != that.rlen || clen != that.clen )
        throw new DMLRuntimeException("Dimension mismatch on merge disjoint (target="+rlen+"x"+clen+", source="+that.rlen+"x"+that.clen+")");
    if( (long)this.nonZeros+ that.nonZeros > (long)rlen*clen )
        throw new DMLRuntimeException("Number of non-zeros mismatch on merge disjoint (target="+rlen+"x"+clen+", nnz target="+nonZeros+", nnz source="+that.nonZeros+")");

    //check for empty target (copy in full)
    if( this.isEmptyBlock(false) ) {
        this.copy(that);
        return;
    }

    //core matrix block merge (guaranteed non-empty source/target, nnz maintenance not required)
    //nnz captured before merge: disjointness implies exact additivity
    long nnz = this.nonZeros + that.nonZeros;
    if( sparse )
        this.mergeIntoSparse(that, appendOnly);
    else
        this.mergeIntoDense(that);

    //maintain number of nonzeros
    this.nonZeros = nnz;
}
/**
 * Merges all non-zeros of {@code that} into this dense block. Handles both a
 * sparse and a dense source; zero source values never overwrite target values.
 *
 * @param that source block (same dimensions as this block)
 */
private void mergeIntoDense(MatrixBlock that)
{
    if( that.sparse ) //DENSE <- SPARSE
    {
        SparseRow[] b = that.sparseRows;
        for( int i=0; i<rlen; i++ )
            if( b[i] != null && !b[i].isEmpty() )
            {
                SparseRow brow = b[i];
                int blen = brow.size();
                int[] bix = brow.getIndexContainer();
                double[] bval = brow.getValueContainer();
                //write only true non-zeros (value containers may hold zeros)
                for( int j=0; j<blen; j++ )
                    if( bval[j] != 0 )
                        this.quickSetValue(i, bix[j], bval[j]);
            }
    }
    else //DENSE <- DENSE
    {
        double[] a = this.denseBlock;
        double[] b = that.denseBlock;
        int len = rlen * clen;
        //keep existing value wherever the source cell is zero
        for( int i=0; i<len; i++ )
            a[i] = ( b[i] != 0 ) ? b[i] : a[i];
    }
}
/**
 * Merges all non-zeros of {@code that} into this sparse block. Handles both a
 * sparse and a dense source. Appends values row-wise and sorts each modified
 * row afterwards unless {@code appendOnly} is set (deferred sort by caller).
 *
 * @param that source block (same dimensions as this block)
 * @param appendOnly if true, skip sorting of modified sparse rows
 */
private void mergeIntoSparse(MatrixBlock that, boolean appendOnly)
{
    if( that.sparse ) //SPARSE <- SPARSE
    {
        SparseRow[] a = this.sparseRows;
        SparseRow[] b = that.sparseRows;
        for( int i=0; i<rlen; i++ )
        {
            if( b[i] != null && !b[i].isEmpty() )
            {
                if( a[i] == null || a[i].isEmpty() ) {
                    //copy entire sparse row (no sort required)
                    a[i] = new SparseRow(b[i]);
                }
                else
                {
                    boolean appended = false;
                    SparseRow brow = b[i];
                    int blen = brow.size();
                    int[] bix = brow.getIndexContainer();
                    double[] bval = brow.getValueContainer();
                    for( int j=0; j<blen; j++ ) {
                        if( bval[j] != 0 ) {
                            this.appendValue(i, bix[j], bval[j]);
                            appended = true;
                        }
                    }
                    //only sort if value appended
                    if( !appendOnly && appended )
                        this.sparseRows[i].sort();
                }
            }
        }
    }
    else //SPARSE <- DENSE
    {
        double[] b = that.denseBlock;
        //bix = linearized start offset of source row i
        for( int i=0, bix=0; i<rlen; i++, bix+=clen )
        {
            boolean appended = false;
            for( int j=0; j<clen; j++ ) {
                if( b[bix+j] != 0 ) {
                    this.appendValue(i, j, b[bix+j]);
                    appended = true;
                }
            }
            //only sort if value appended
            if( !appendOnly && appended )
                this.sparseRows[i].sort();
        }
    }
}
////////
// Input/Output functions
/**
 * Deserializes this matrix block from the given input, reading the header
 * (rlen, clen, block type) and dispatching to the format-specific reader.
 * The in-memory representation (sparse/dense) is chosen independently of
 * the on-disk format via {@code evalSparseFormatInMemory}.
 *
 * @param in input to read from
 * @throws IOException on an invalid format byte or any read/convert failure
 */
@Override
public void readFields(DataInput in)
    throws IOException
{
    //read basic header (int rlen, int clen, byte type)
    rlen = in.readInt();
    clen = in.readInt();
    byte bformat = in.readByte();

    //check type information
    //valid codes are 0..length-1 (exclusive upper bound)
    if( bformat<0 || bformat>=BlockType.values().length )
        throw new IOException("invalid format: '"+bformat+"' (need to be 0-"+(BlockType.values().length-1)+").");

    BlockType format=BlockType.values()[bformat];
    try
    {
        switch(format)
        {
            case ULTRA_SPARSE_BLOCK:
                nonZeros = readNnzInfo( in, true );
                sparse = evalSparseFormatInMemory(rlen, clen, nonZeros);
                cleanupBlock(true, true); //clean all
                if( sparse )
                    readUltraSparseBlock(in);
                else
                    readUltraSparseToDense(in);
                break;
            case SPARSE_BLOCK:
                nonZeros = readNnzInfo( in, false );
                sparse = evalSparseFormatInMemory(rlen, clen, nonZeros);
                cleanupBlock(sparse, !sparse); //drop the representation we won't use
                if( sparse )
                    readSparseBlock(in);
                else
                    readSparseToDense(in);
                break;
            case DENSE_BLOCK:
                sparse = false;
                cleanupBlock(false, true); //reuse dense
                readDenseBlock(in); //always dense in-mem if dense on disk
                break;
            case EMPTY_BLOCK:
                sparse = true;
                cleanupBlock(true, true); //clean all
                nonZeros = 0;
                break;
        }
    }
    catch(DMLRuntimeException ex)
    {
        //wrap as IOException to satisfy the Writable contract, keep cause
        throw new IOException("Error reading block of type '"+format.toString()+"'.", ex);
    }
}
/**
 * Reads a dense on-disk block into the dense in-memory representation,
 * recomputing nonZeros on the fly. Uses fast bulk deserialization when the
 * input supports it.
 *
 * @param in input to read from
 * @throws IOException if reading fails
 * @throws DMLRuntimeException if dense block allocation fails
 */
private void readDenseBlock(DataInput in)
    throws IOException, DMLRuntimeException
{
    allocateDenseBlock(true); //allocate block, clear nnz

    int limit = rlen*clen;

    if( in instanceof MatrixBlockDataInput ) //fast deserialize
    {
        MatrixBlockDataInput mbin = (MatrixBlockDataInput)in;
        nonZeros = mbin.readDoubleArray(limit, denseBlock);
    }
    else if( in instanceof DataInputBuffer && MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
    {
        //workaround because sequencefile.reader.next(key, value) does not yet support serialization framework
        DataInputBuffer din = (DataInputBuffer)in;
        MatrixBlockDataInput mbin = new FastBufferedDataInputStream(din);
        nonZeros = mbin.readDoubleArray(limit, denseBlock);
        ((FastBufferedDataInputStream)mbin).close();
    }
    else //default deserialize
    {
        //read cell by cell and count non-zeros
        for( int i=0; i<limit; i++ )
        {
            denseBlock[i]=in.readDouble();
            if(denseBlock[i]!=0)
                nonZeros++;
        }
    }
}
/**
 * Reads a sparse on-disk block into the sparse in-memory representation.
 * Uses fast bulk deserialization when the input supports it; otherwise reads
 * per-row (int count, then int/double pairs).
 *
 * @param in input to read from
 * @throws IOException if reading fails
 */
private void readSparseBlock(DataInput in)
    throws IOException
{
    allocateSparseRowsBlock(false);
    resetSparse(); //reset all sparse rows

    if( in instanceof MatrixBlockDataInput ) //fast deserialize
    {
        MatrixBlockDataInput mbin = (MatrixBlockDataInput)in;
        nonZeros = mbin.readSparseRows(rlen, sparseRows);
    }
    else if( in instanceof DataInputBuffer && MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
    {
        //workaround because sequencefile.reader.next(key, value) does not yet support serialization framework
        DataInputBuffer din = (DataInputBuffer)in;
        MatrixBlockDataInput mbin = new FastBufferedDataInputStream(din);
        nonZeros = mbin.readSparseRows(rlen, sparseRows);
        ((FastBufferedDataInputStream)mbin).close();
    }
    else //default deserialize
    {
        for(int r=0; r<rlen; r++)
        {
            int nr=in.readInt(); //number of non-zeros in row r
            if(nr==0)
            {
                //empty row: reuse existing row object if present
                if(sparseRows[r]!=null)
                    sparseRows[r].reset(estimatedNNzsPerRow, clen);
                continue;
            }
            //size row exactly to nr entries (allocate or reset)
            if(sparseRows[r]==null)
                sparseRows[r]=new SparseRow(nr);
            else
                sparseRows[r].reset(nr, clen);
            for(int j=0; j<nr; j++)
                sparseRows[r].append(in.readInt(), in.readDouble());
        }
    }
}
/**
 * Reads a sparse on-disk block into the dense in-memory representation
 * (scatter of int/double pairs into a zeroed dense array). nonZeros is set
 * by the caller from the serialized header.
 *
 * @param in input to read from
 * @throws IOException if reading fails
 * @throws DMLRuntimeException if dense block allocation fails
 */
private void readSparseToDense(DataInput in)
    throws IOException, DMLRuntimeException
{
    allocateDenseBlock(false); //allocate block
    Arrays.fill(denseBlock, 0); //clear: only non-zeros are serialized

    for(int r=0; r<rlen; r++)
    {
        int nr = in.readInt(); //number of non-zeros in row r
        for( int j=0; j<nr; j++ )
        {
            int c = in.readInt();
            double val = in.readDouble();
            denseBlock[r*clen+c] = val;
        }
    }
}
/**
 * Reads an ultra-sparse on-disk block (nonZeros many row/col/value triples)
 * into the sparse in-memory representation. nonZeros is set by the caller
 * from the serialized header.
 *
 * @param in input to read from
 * @throws IOException if reading fails
 */
private void readUltraSparseBlock(DataInput in)
    throws IOException
{
    allocateSparseRowsBlock(false); //adjust to size
    resetSparse(); //reset all sparse rows

    for(long i=0; i<nonZeros; i++)
    {
        int r = in.readInt();
        int c = in.readInt();
        double val = in.readDouble();
        //lazily allocate rows; initial capacity 1 (ultra-sparse: nnz < rlen)
        if(sparseRows[r]==null)
            sparseRows[r]=new SparseRow(1,clen);
        sparseRows[r].append(c, val);
    }
}
/**
 * Reads an ultra-sparse on-disk block (nonZeros many row/col/value triples)
 * into the dense in-memory representation. nonZeros is set by the caller
 * from the serialized header.
 *
 * @param in input to read from
 * @throws IOException if reading fails
 * @throws DMLRuntimeException if dense block allocation fails
 */
private void readUltraSparseToDense(DataInput in)
    throws IOException, DMLRuntimeException
{
    allocateDenseBlock(false); //allocate block
    Arrays.fill(denseBlock, 0); //clear: only non-zeros are serialized

    for(long i=0; i<nonZeros; i++)
    {
        int r = in.readInt();
        int c = in.readInt();
        double val = in.readDouble();
        denseBlock[r*clen+c] = val;
    }
}
/**
 * Serializes this matrix block. The on-disk format (empty / ultra-sparse /
 * sparse / dense) is chosen independently of the in-memory representation,
 * based on {@code evalSparseFormatOnDisk} and the actual nnz. Must stay in
 * sync with {@code getExactSizeOnDisk}.
 *
 * @param out output to write to
 * @throws IOException if writing fails
 */
@Override
public void write(DataOutput out)
    throws IOException
{
    //determine format
    boolean sparseSrc = sparse;
    boolean sparseDst = evalSparseFormatOnDisk();

    //write first part of header
    out.writeInt(rlen);
    out.writeInt(clen);

    if( sparseSrc )
    {
        //write sparse to *
        if( sparseRows==null || nonZeros==0 )
            writeEmptyBlock(out);
        else if( nonZeros<rlen && sparseDst ) //fewer nnz than rows: triples cheaper
            writeSparseToUltraSparse(out);
        else if( sparseDst )
            writeSparseBlock(out);
        else
            writeSparseToDense(out);
    }
    else
    {
        //write dense to *
        if( denseBlock==null || nonZeros==0 )
            writeEmptyBlock(out);
        else if( nonZeros<rlen && sparseDst )
            writeDenseToUltraSparse(out);
        else if( sparseDst )
            writeDenseToSparse(out);
        else
            writeDenseBlock(out);
    }
}
/**
 * Writes an empty block, i.e., just the block-type byte after the dimensions
 * already written by {@code write}.
 *
 * @param out output to write to
 * @throws IOException if writing fails
 */
private void writeEmptyBlock(DataOutput out)
    throws IOException
{
    //empty blocks do not need to materialize row information
    out.writeByte( BlockType.EMPTY_BLOCK.ordinal() );
}
/**
 * Writes this dense block in dense on-disk format: type byte followed by all
 * rlen*clen cell values. Uses fast bulk serialization when supported.
 *
 * @param out output to write to
 * @throws IOException if writing fails
 */
private void writeDenseBlock(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.DENSE_BLOCK.ordinal() );

    int limit=rlen*clen;
    if( out instanceof MatrixBlockDataOutput ) //fast serialize
        ((MatrixBlockDataOutput)out).writeDoubleArray(limit, denseBlock);
    else //general case (if fast serialize not supported)
        for(int i=0; i<limit; i++)
            out.writeDouble(denseBlock[i]);
}
/**
 * Writes this sparse block in sparse on-disk format: type byte, nnz info,
 * then per row an int count followed by int/double (column, value) pairs.
 * Uses fast bulk serialization when supported.
 *
 * @param out output to write to
 * @throws IOException if writing fails
 */
private void writeSparseBlock(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.SPARSE_BLOCK.ordinal() );
    writeNnzInfo( out, false );

    if( out instanceof MatrixBlockDataOutput ) //fast serialize
        ((MatrixBlockDataOutput)out).writeSparseRows(rlen, sparseRows);
    else //general case (if fast serialize not supported)
    {
        int r=0;
        //sparseRows may be shorter than rlen; trailing rows are empty
        for(;r<Math.min(rlen, sparseRows.length); r++)
        {
            if(sparseRows[r]==null)
                out.writeInt(0);
            else
            {
                int nr=sparseRows[r].size();
                out.writeInt(nr);
                int[] cols=sparseRows[r].getIndexContainer();
                double[] values=sparseRows[r].getValueContainer();
                for(int j=0; j<nr; j++)
                {
                    out.writeInt(cols[j]);
                    out.writeDouble(values[j]);
                }
            }
        }
        //remaining (unallocated) rows serialized as empty
        for(;r<rlen; r++)
            out.writeInt(0);
    }
}
/**
 * Writes this sparse block in ultra-sparse on-disk format: type byte, nnz
 * info, then one (int row, int col, double value) triple per non-zero.
 * Verifies that exactly nonZeros triples were written.
 *
 * @param out output to write to
 * @throws IOException if writing fails or nnz is inconsistent
 */
private void writeSparseToUltraSparse(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.ULTRA_SPARSE_BLOCK.ordinal() );
    writeNnzInfo( out, true );

    long wnnz = 0; //written non-zeros, for consistency check
    for(int r=0;r<Math.min(rlen, sparseRows.length); r++)
        if(sparseRows[r]!=null && !sparseRows[r].isEmpty() )
        {
            int alen = sparseRows[r].size();
            int[] aix = sparseRows[r].getIndexContainer();
            double[] avals = sparseRows[r].getValueContainer();
            for(int j=0; j<alen; j++) {
                out.writeInt(r);
                out.writeInt(aix[j]);
                out.writeDouble(avals[j]);
                wnnz++;
            }
        }

    //validity check (nnz must exactly match written nnz)
    if( nonZeros != wnnz ) {
        throw new IOException("Invalid number of serialized non-zeros: "+wnnz+" (expected: "+nonZeros+")");
    }
}
/**
 * Writes this sparse block in dense on-disk format: type byte followed by
 * all rlen*clen cell values, filling gaps between non-zeros with zeros.
 *
 * @param out output to write to
 * @throws IOException if writing fails
 */
private void writeSparseToDense(DataOutput out)
    throws IOException
{
    //write block type 'dense'
    out.writeByte( BlockType.DENSE_BLOCK.ordinal() );

    //write data (from sparse to dense)
    if( sparseRows==null ) //empty block
        for( int i=0; i<rlen*clen; i++ )
            out.writeDouble(0);
    else //existing sparse block
    {
        for( int i=0; i<rlen; i++ )
        {
            if( i<sparseRows.length && sparseRows[i]!=null && !sparseRows[i].isEmpty() )
            {
                SparseRow arow = sparseRows[i];
                int alen = arow.size();
                int[] aix = arow.getIndexContainer();
                double[] avals = arow.getValueContainer();
                //foreach non-zero value, fill with 0s if required
                //j tracks the output column, j2 the next non-zero
                for( int j=0, j2=0; j2<alen; j++, j2++ ) {
                    for( ; j<aix[j2]; j++ )
                        out.writeDouble( 0 );
                    out.writeDouble( avals[j2] );
                }
                //remaining 0 values in row
                for( int j=aix[alen-1]+1; j<clen; j++)
                    out.writeDouble( 0 );
            }
            else //empty row
                for( int j=0; j<clen; j++ )
                    out.writeDouble( 0 );
        }
    }
}
/**
 * Writes this dense block in ultra-sparse on-disk format: type byte, nnz
 * info, then one (int row, int col, double value) triple per non-zero.
 * Verifies that exactly nonZeros triples were written.
 *
 * @param out output to write to
 * @throws IOException if writing fails or nnz is inconsistent
 */
private void writeDenseToUltraSparse(DataOutput out) throws IOException
{
    out.writeByte( BlockType.ULTRA_SPARSE_BLOCK.ordinal() );
    writeNnzInfo( out, true );

    long wnnz = 0; //written non-zeros, for consistency check

    //ix = linearized cell index
    for(int r=0, ix=0; r<rlen; r++)
        for(int c=0; c<clen; c++, ix++)
            if( denseBlock[ix]!=0 )
            {
                out.writeInt(r);
                out.writeInt(c);
                out.writeDouble(denseBlock[ix]);
                wnnz++;
            }

    //validity check (nnz must exactly match written nnz)
    if( nonZeros != wnnz ) {
        throw new IOException("Invalid number of serialized non-zeros: "+wnnz+" (expected: "+nonZeros+")");
    }
}
/**
 * Writes this dense block in sparse on-disk format: type byte, nnz info,
 * then per row an int count followed by int/double (column, value) pairs.
 * Requires two passes per row (count, then emit).
 *
 * @param out output to write to
 * @throws IOException if writing fails
 */
private void writeDenseToSparse(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.SPARSE_BLOCK.ordinal() ); //block type
    writeNnzInfo( out, false );

    int start=0; //linearized row start offset
    for(int r=0; r<rlen; r++)
    {
        //count nonzeros
        int nr=0;
        for(int i=start; i<start+clen; i++)
            if(denseBlock[i]!=0.0)
                nr++;
        out.writeInt(nr);
        //emit (column, value) pairs for non-zeros of this row
        for(int c=0; c<clen; c++)
        {
            if(denseBlock[start]!=0.0)
            {
                out.writeInt(c);
                out.writeDouble(denseBlock[start]);
            }
            start++;
        }
    }
}
/**
 * Reads the serialized non-zero count into {@code nonZeros}, as a long for
 * blocks whose cell count exceeds the int range, otherwise as an int.
 * Must stay consistent with {@code writeNnzInfo}.
 *
 * @param in input to read from
 * @param ultrasparse true for ultra-sparse blocks (always int, since nnz&lt;rlen)
 * @return the non-zero count read
 * @throws IOException if reading fails
 */
private long readNnzInfo( DataInput in, boolean ultrasparse )
    throws IOException
{
    //note: if ultrasparse, int always sufficient because nnz<rlen
    //      where rlen is limited to integer

    long lrlen = (long)rlen;
    long lclen = (long)clen;

    //read long if required, otherwise int (see writeNnzInfo, consistency required)
    if( lrlen*lclen > Integer.MAX_VALUE  && !ultrasparse) {
        nonZeros = in.readLong();
    }
    else {
        nonZeros = in.readInt();
    }

    return nonZeros;
}
/**
 * Writes the non-zero count, as a long for blocks whose cell count exceeds
 * the int range, otherwise as an int. Must stay consistent with
 * {@code readNnzInfo}.
 *
 * @param out output to write to
 * @param ultrasparse true for ultra-sparse blocks (always int, since nnz&lt;rlen)
 * @throws IOException if writing fails
 */
private void writeNnzInfo( DataOutput out, boolean ultrasparse )
    throws IOException
{
    //note: if ultrasparse, int always sufficient because nnz<rlen
    //      where rlen is limited to integer

    long lrlen = (long)rlen;
    long lclen = (long)clen;

    //write long if required, otherwise int
    if( lrlen*lclen > Integer.MAX_VALUE && !ultrasparse) {
        out.writeLong( nonZeros );
    }
    else {
        out.writeInt( (int)nonZeros );
    }
}
/**
 * NOTE: The used estimates must be kept consistent with the respective write functions.
 *
 * Computes the exact serialized size of this block by mirroring the format
 * decision logic of {@code write} (empty / ultra-sparse / sparse / dense).
 *
 * @return exact size in bytes of this block as written by {@code write}
 */
public long getExactSizeOnDisk()
{
    //determine format
    boolean sparseSrc = sparse;
    boolean sparseDst = evalSparseFormatOnDisk();

    long lrlen = (long) rlen;
    long lclen = (long) clen;
    long lnonZeros = (long) nonZeros;

    //ensure exact size estimates for write
    //(a stale/unset nnz would change the chosen on-disk format)
    if( lnonZeros <= 0 )
    {
        recomputeNonZeros();
        lnonZeros = (long) nonZeros;
    }

    //get exact size estimate (see write for the corresponding meaning)
    if( sparseSrc )
    {
        //write sparse to *
        if(sparseRows==null || lnonZeros==0)
            return HEADER_SIZE; //empty block
        else if( lnonZeros<lrlen && sparseDst )
            return estimateSizeUltraSparseOnDisk(lrlen, lclen, lnonZeros); //ultra sparse block
        else if( sparseDst )
            return estimateSizeSparseOnDisk(lrlen, lclen, lnonZeros); //sparse block
        else
            return estimateSizeDenseOnDisk(lrlen, lclen); //dense block
    }
    else
    {
        //write dense to *
        if(denseBlock==null || lnonZeros==0)
            return HEADER_SIZE; //empty block
        else if( lnonZeros<lrlen && sparseDst )
            return estimateSizeUltraSparseOnDisk(lrlen, lclen, lnonZeros); //ultra sparse block
        else if( sparseDst )
            return estimateSizeSparseOnDisk(lrlen, lclen, lnonZeros); //sparse block
        else
            return estimateSizeDenseOnDisk(lrlen, lclen); //dense block
    }
}
////////
// Estimates size and sparsity
/**
 * Estimates the in-memory size of a matrix block with the given dimensions
 * and sparsity, accounting for the sparse/dense representation it would use.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param sparsity fraction of non-zero cells in [0,1]
 * @return estimated size in bytes
 */
public static long estimateSizeInMemory(long nrows, long ncols, double sparsity)
{
    //determine sparse/dense representation
    boolean sparse = evalSparseFormatInMemory(nrows, ncols, (long)(sparsity*nrows*ncols));

    //estimate memory consumption for sparse/dense
    if( sparse )
        return estimateSizeSparseInMemory(nrows, ncols, sparsity);
    else
        return estimateSizeDenseInMemory(nrows, ncols);
}
/**
 * Estimates the in-memory size of a dense matrix block: a fixed overhead for
 * the object header, basic fields, and references, plus 8B per cell for the
 * backing double array.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @return estimated size in bytes
 */
private static long estimateSizeDenseInMemory(long nrows, long ncols)
{
    //44B fixed overhead (basic variables and references)
    //plus the core dense double[] with 8B per cell
    return 44 + 8 * nrows * ncols;
}
/**
 * Estimates the in-memory size of a sparse matrix block for 64bit JVMs,
 * accounting for per-row object overheads, initial row capacities, and the
 * 12B column-index/value pair per non-zero.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param sparsity fraction of non-zero cells in [0,1]
 * @return estimated size in bytes
 */
private static long estimateSizeSparseInMemory(long nrows, long ncols, double sparsity)
{
    // basic variables and references sizes
    long size = 44;

    //NOTES:
    // * Each sparse row has a fixed overhead of 8B (reference) + 32B (object) +
    //   12B (3 int members), 32B (overhead int array), 32B (overhead double array),
    // * Each non-zero value requires 12B for the column-index/value pair.
    // * Overheads for arrays, objects, and references refer to 64bit JVMs
    // * If nnz < than rows we have only also empty rows.

    //account for sparsity and initial capacity
    //cnnz: per-row entry estimate, bounded below by the row's initial capacity
    long cnnz = Math.max(SparseRow.initialCapacity, (long)Math.ceil(sparsity*ncols));
    //rlen: number of non-empty rows (at most one nnz per row when very sparse)
    long rlen = Math.min(nrows, (long) Math.ceil(sparsity*nrows*ncols));
    size += rlen * ( 116 + 12 * cnnz ); //sparse row
    size += nrows * 8; //empty rows (null references)

    //OLD ESTIMATE:
    //int len = Math.max(SparseRow.initialCapacity, (int)Math.ceil(sparsity*ncols));
    //size += nrows * (28 + 12 * len );

    return size;
}
/**
 * Estimates the on-disk size of a matrix block with the given dimensions and
 * non-zero count, accounting for the on-disk format it would be written in
 * (ultra-sparse / sparse / dense).
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return estimated size in bytes
 */
public static long estimateSizeOnDisk( long nrows, long ncols, long nnz )
{
    //determine sparse/dense representation
    boolean sparse = evalSparseFormatOnDisk(nrows, ncols, nnz);

    //estimate memory consumption for sparse/dense
    //(ultra-sparse triples are cheaper whenever nnz < nrows, see write)
    if( sparse && nnz<nrows )
        return estimateSizeUltraSparseOnDisk(nrows, ncols, nnz);
    else if( sparse )
        return estimateSizeSparseOnDisk(nrows, ncols, nnz);
    else
        return estimateSizeDenseOnDisk(nrows, ncols);
}
/**
 * Computes the on-disk size of a dense-format block: basic header plus one
 * 8B double per cell. Must stay consistent with {@code writeDenseBlock}.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @return size in bytes
 */
private static long estimateSizeDenseOnDisk( long nrows, long ncols)
{
    //basic header (int rlen, int clen, byte type) + 8B per cell
    return HEADER_SIZE + 8 * nrows * ncols;
}
/**
 * Computes the on-disk size of a sparse-format block: basic header, nnz
 * header (long only if the cell count exceeds the int range), a 4B count per
 * row, and a 12B (int column, double value) pair per non-zero. Must stay
 * consistent with {@code writeSparseBlock} and {@code writeNnzInfo}.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return size in bytes
 */
private static long estimateSizeSparseOnDisk( long nrows, long ncols, long nnz )
{
    //extended header: long nnz iff cell count exceeds int range
    long nnzHeader = (nrows * ncols > Integer.MAX_VALUE) ? 8 : 4;

    //basic header (int rlen, int clen, byte type) + nnz header
    //+ per-row int count + int/double pair per non-zero
    return HEADER_SIZE + nnzHeader + 4 * nrows + 12 * nnz;
}
/**
 * Computes the on-disk size of an ultra-sparse-format block: basic header,
 * a 4B nnz header (int suffices because nnz&lt;rlen), and a 16B
 * (int row, int col, double value) triple per non-zero. Must stay consistent
 * with {@code writeSparseToUltraSparse} and {@code writeDenseToUltraSparse}.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return size in bytes
 */
private static long estimateSizeUltraSparseOnDisk( long nrows, long ncols, long nnz )
{
    //basic header (int rlen, int clen, byte type)
    //+ 4B nnz header (int guaranteed by nnz<rlen)
    //+ 16B row/col/value triple per non-zero
    return HEADER_SIZE + 4 + 16 * nnz;
}
/**
 * Estimates the output sparsity of an aggregate binary operation (matrix
 * multiplication m1 %*% m2). Currently always returns a dense estimate
 * because the matrix-mult library uses a dense intermediate anyway.
 *
 * @param m1 left-hand-side input
 * @param m2 right-hand-side input
 * @param op aggregate binary operator (currently unused, see note below)
 * @return dense sparsity estimate with nnz = rows(m1) * columns(m2)
 */
public static SparsityEstimate estimateSparsityOnAggBinary(MatrixBlock m1, MatrixBlock m2, AggregateBinaryOperator op)
{
    //NOTE: since MatrixMultLib always uses a dense intermediate output
    //with subsequent check for sparsity, we should always return a dense estimate.
    //Once, we support more aggregate binary operations, we need to change this.

    //output of m1 %*% m2 has dimensions rows(m1) x columns(m2);
    //cast to long to avoid int overflow for large blocks
    return new SparsityEstimate(false, (long)m1.getNumRows()*m2.getNumColumns());

    /*
    SparsityEstimate est=new SparsityEstimate();

    double m=m2.getNumColumns();

    //handle vectors specially
    //if result is a column vector, use dense format, otherwise use the normal process to decide
    if ( !op.sparseSafe || m <=SKINNY_MATRIX_TURN_POINT)
    {
        est.sparse=false;
    }
    else
    {
        double n=m1.getNumRows();
        double k=m1.getNumColumns();
        double nz1=m1.getNonZeros();
        double nz2=m2.getNonZeros();
        double pq=nz1*nz2/n/k/k/m;
        double estimated= 1-Math.pow(1-pq, k);
        est.sparse=(estimated < SPARCITY_TURN_POINT);
        est.estimatedNonZeros=(int)(estimated*n*m);
    }
    return est;
    */
}
/**
 * Estimates the output sparsity of a binary cell-wise operation, accounting
 * for matrix-vector broadcasting and outer vector-vector products. Uses a
 * worst-case nnz bound (min for AND/multiply, sum otherwise).
 *
 * @param m1 left-hand-side input
 * @param m2 right-hand-side input
 * @param op binary operator
 * @return sparsity estimate (dense whenever the operator is not sparse-safe)
 */
private static SparsityEstimate estimateSparsityOnBinary(MatrixBlock m1, MatrixBlock m2, BinaryOperator op)
{
    SparsityEstimate est=new SparsityEstimate();

    //if result is a column vector, use dense format, otherwise use the normal process to decide
    if(!op.sparseSafe ) {
        est.sparse = false;
        return est;
    }

    BinaryAccessType atype = LibMatrixBincell.getBinaryAccessType(m1, m2);
    boolean outer = (atype == BinaryAccessType.OUTER_VECTOR_VECTOR);
    long m = m1.getNumRows();
    //outer product output width comes from the row-vector m2
    long n = outer ? m2.getNumColumns() : m1.getNumColumns();
    long nz1 = m1.getNonZeros();
    long nz2 = m2.getNonZeros();

    //account for matrix vector and vector/vector
    long estnnz = 0;
    if( atype == BinaryAccessType.OUTER_VECTOR_VECTOR )
    {
        //for outer vector operations the sparsity estimate is exactly known
        estnnz = nz1 * nz2;
    }
    else //DEFAULT CASE
    {
        //scale vector nnz to the broadcast size before combining
        if( atype == BinaryAccessType.MATRIX_COL_VECTOR )
            nz2 = nz2 * n;
        else if( atype == BinaryAccessType.MATRIX_ROW_VECTOR )
            nz2 = nz2 * m;

        if(op.fn instanceof And || op.fn instanceof Multiply)
            estnnz = Math.min(nz1, nz2); //worstcase wrt overlap
        else
            estnnz = nz1+nz2; //worstcase wrt operation
    }

    est.sparse = evalSparseFormatInMemory(m, n, estnnz);
    est.estimatedNonZeros = estnnz;

    return est;
}
/**
 * Decides the representation of a slice result by scaling this block's
 * current sparsity to the selected region and evaluating it against the
 * final output dimensions.
 *
 * @param selectRlen number of selected rows
 * @param selectClen number of selected columns
 * @param finalRlen output number of rows
 * @param finalClen output number of columns
 * @return true if the slice result should be sparse
 */
private boolean estimateSparsityOnSlice(int selectRlen, int selectClen, int finalRlen, int finalClen)
{
    //uniform-distribution assumption: region nnz proportional to region size
    long ennz = (long)((double)nonZeros/rlen/clen*selectRlen*selectClen);
    return evalSparseFormatInMemory(finalRlen, finalClen, ennz);
}
/**
 * Decides the representation of a left-indexing result (m1[...]=m2) using
 * the worst-case nnz upper bound.
 *
 * @param rlenm1 target rows
 * @param clenm1 target columns
 * @param nnzm1 target non-zeros
 * @param rlenm2 source rows
 * @param clenm2 source columns
 * @param nnzm2 source non-zeros
 * @return true if the result should be sparse
 */
private boolean estimateSparsityOnLeftIndexing(long rlenm1, long clenm1, long nnzm1, long rlenm2, long clenm2, long nnzm2)
{
    //min bound: nnzm1 - rlenm2*clenm2 + nnzm2
    //max bound: min(rlenm1*rlenm2, nnzm1+nnzm2)
    //use the max (worst-case) bound for the format decision
    long ennz = Math.min(rlenm1*clenm1, nnzm1+nnzm2);
    return evalSparseFormatInMemory(rlenm1, clenm1, ennz);
}
/**
 * Decides the representation of a grouped-aggregate result, which is a
 * column vector with one entry per group (at most one non-zero per group).
 *
 * @param rlen number of input rows
 * @param groups number of groups
 * @return true if the groups x 1 result should be sparse
 */
private boolean estimateSparsityOnGroupedAgg( long rlen, long groups )
{
    long ennz = Math.min(groups, rlen);
    return evalSparseFormatInMemory(groups, 1, ennz);
}
////////
// Core block operations (called from instructions)
/**
 * Applies a scalar operation (e.g., X+7, X*2) to this block and returns the
 * result, reusing {@code result} if provided.
 *
 * @param op scalar operator
 * @param result optional pre-allocated output block (may be null)
 * @return the result block
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
public MatrixValue scalarOperations(ScalarOperator op, MatrixValue result)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock ret = checkType(result);

    // estimate the sparsity structure of result matrix
    boolean sp = this.sparse; // by default, we guess result.sparsity=input.sparsity
    if (!op.sparseSafe)
        sp = false; // if the operation is not sparse safe, then result will be in dense format

    //allocate the output matrix block
    if( ret==null )
        ret = new MatrixBlock(rlen, clen, sp, this.nonZeros);
    else
        ret.reset(rlen, clen, sp, this.nonZeros);

    //core scalar operations
    LibMatrixBincell.bincellOp(this, ret, op);

    return ret;
}
/**
 * Applies a unary operation (e.g., abs, round, cumsum) to this block and
 * returns the result, reusing {@code result} if provided.
 *
 * @param op unary operator
 * @param result optional pre-allocated output block (may be null)
 * @return the result block
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
public MatrixValue unaryOperations(UnaryOperator op, MatrixValue result)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    checkType(result);

    // estimate the sparsity structure of result matrix
    boolean sp = this.sparse; // by default, we guess result.sparsity=input.sparsity
    if (!op.sparseSafe)
        sp = false; // if the operation is not sparse safe, then result will be in dense format

    //allocate output
    if(result==null)
        result=new MatrixBlock(rlen, clen, sp, this.nonZeros);
    else
        result.reset(rlen, clen, sp);

    //core execute
    if( LibMatrixAgg.isSupportedUnaryOperator(op) ) //e.g., cumsum
    {
        //dedicated library kernel (no copy required)
        LibMatrixAgg.aggregateUnaryMatrix(this, (MatrixBlock)result, op);
    }
    else
    {
        //fallback: copy input, then modify in place
        result.copy(this);
        ((MatrixBlock)result).unaryOperationsInPlace(op);
    }

    return result;
}
/**
 * Applies a unary operation to this block in place, dispatching on whether
 * the operator is sparse-safe (op(0)==0, so only non-zeros need visiting).
 *
 * @param op unary operator
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
public void unaryOperationsInPlace(UnaryOperator op)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if(op.sparseSafe)
        sparseUnaryOperationsInPlace(op);
    else
        denseUnaryOperationsInPlace(op);
}
/**
 * Applies a sparse-safe unary operation in place; only non-zero cells are
 * visited (op(0)==0 is guaranteed by sparse-safety). Recomputes nonZeros and
 * compacts sparse rows whose values become zero.
 *
 * @param op sparse-safe unary operator
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
private void sparseUnaryOperationsInPlace(UnaryOperator op)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    //early abort possible since sparse-safe
    if( isEmptyBlock(false) )
        return;

    if(sparse)
    {
        nonZeros=0;
        for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
        {
            if(sparseRows[r]==null)
                continue;
            double[] values=sparseRows[r].getValueContainer();
            int[] cols=sparseRows[r].getIndexContainer();
            int pos=0; //write position for in-place compaction
            for(int i=0; i<sparseRows[r].size(); i++)
            {
                double v=op.fn.execute(values[i]);
                if(v!=0)
                {
                    //keep non-zero result, shifting left over dropped zeros
                    values[pos]=v;
                    cols[pos]=cols[i];
                    pos++;
                    nonZeros++;
                }
            }
            //drop trailing entries invalidated by compaction
            sparseRows[r].truncate(pos);
        }
    }
    else
    {
        //dense representation: apply cell-wise and recount non-zeros
        int limit=rlen*clen;
        nonZeros=0;
        for(int i=0; i<limit; i++)
        {
            denseBlock[i]=op.fn.execute(denseBlock[i]);
            if(denseBlock[i]!=0)
                nonZeros++;
        }
    }
}
/**
 * Applies a non-sparse-safe unary operation in place; every cell must be
 * visited because op(0) may be non-zero. For a sparse block this goes
 * through quickGet/quickSet; for a dense block it recounts nonZeros.
 *
 * @param op unary operator (not sparse-safe)
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
private void denseUnaryOperationsInPlace(UnaryOperator op)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if( sparse ) //SPARSE MATRIX
    {
        double v;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                v=op.fn.execute(quickGetValue(r, c));
                quickSetValue(r, c, v);
            }
    }
    else//DENSE MATRIX
    {
        //early abort not possible because not sparsesafe
        if(denseBlock==null)
            allocateDenseBlock();

        int limit=rlen*clen;
        nonZeros=0;
        for(int i=0; i<limit; i++)
        {
            denseBlock[i]=op.fn.execute(denseBlock[i]);
            if(denseBlock[i]!=0)
                nonZeros++;
        }
    }
}
/**
 * Applies a binary cell-wise operation between this block and {@code thatValue}
 * (supports matrix-matrix, matrix-vector, and outer vector-vector), returning
 * the result and reusing {@code result} if provided.
 *
 * @param op binary operator
 * @param thatValue right-hand-side operand
 * @param result optional pre-allocated output block (may be null)
 * @return the result block
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
public MatrixValue binaryOperations(BinaryOperator op, MatrixValue thatValue, MatrixValue result)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock that = checkType(thatValue);
    MatrixBlock ret = checkType(result);
    if( !LibMatrixBincell.isValidDimensionsBinary(this, that) ) {
        throw new RuntimeException("block sizes are not matched for binary " +
                "cell operations: "+this.rlen+"x"+this.clen+" vs "+ that.rlen+"x"+that.clen);
    }

    //compute output dimensions
    //outer vector-vector product yields rlen x that.clen
    boolean outer = (LibMatrixBincell.getBinaryAccessType(this, that)
                     == BinaryAccessType.OUTER_VECTOR_VECTOR);
    int rows = rlen;
    int cols = outer ? that.clen : clen;

    //estimate output sparsity
    SparsityEstimate resultSparse = estimateSparsityOnBinary(this, that, op);
    if( ret == null )
        ret = new MatrixBlock(rows, cols, resultSparse.sparse, resultSparse.estimatedNonZeros);
    else
        ret.reset(rows, cols, resultSparse.sparse, resultSparse.estimatedNonZeros);

    //core binary cell operation
    LibMatrixBincell.bincellOp( this, that, ret, op );

    return ret;
}
/**
 * Applies a binary cell-wise operation between this block and {@code thatValue}
 * in place, converting this block's representation first if the estimated
 * output sparsity differs from the current one.
 *
 * @param op binary operator
 * @param thatValue right-hand-side operand
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the operation fails
 */
public void binaryOperationsInPlace(BinaryOperator op, MatrixValue thatValue)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock that=checkType(thatValue);
    if( !LibMatrixBincell.isValidDimensionsBinary(this, that) ) {
        throw new RuntimeException("block sizes are not matched for binary " +
                "cell operations: "+this.rlen+"*"+this.clen+" vs "+ that.rlen+"*"+that.clen);
    }

    //estimate output sparsity
    //convert representation up-front so the in-place kernel matches it
    SparsityEstimate resultSparse = estimateSparsityOnBinary(this, that, op);
    if(resultSparse.sparse && !this.sparse)
        denseToSparse();
    else if(!resultSparse.sparse && this.sparse)
        sparseToDense();

    //core binary cell operation
    LibMatrixBincell.bincellOpInPlace(this, that, op);
}
public void incrementalAggregate(AggregateOperator aggOp, MatrixValue correction,
MatrixValue newWithCorrection)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//assert(aggOp.correctionExists);
MatrixBlock cor=checkType(correction);
MatrixBlock newWithCor=checkType(newWithCorrection);
KahanObject buffer=new KahanObject(0, 0);
if(aggOp.correctionLocation==CorrectionLocationType.LASTROW)
{
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.quickGetValue(r, c);
buffer._correction=cor.quickGetValue(0, c);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c),
newWithCor.quickGetValue(r+1, c));
quickSetValue(r, c, buffer._sum);
cor.quickSetValue(0, c, buffer._correction);
}
}else if(aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
{
if(aggOp.increOp.fn instanceof Builtin
&& ( ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MAXINDEX
|| ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MININDEX )
){
// *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
// rowIndexMax() and its siblings don't fit very well into the standard
// aggregate framework. We (ab)use the "correction factor" argument to
// hold the maximum value in each row/column.
// The execute() method for this aggregate takes as its argument
// two candidates for the highest value. Bookkeeping about
// indexes (return column/row index with highest value, breaking
// ties in favor of higher indexes) is handled in this function.
// Note that both versions of incrementalAggregate() contain
// very similar blocks of special-case code. If one block is
// modified, the other needs to be changed to match.
for(int r=0; r<rlen; r++){
double currMaxValue = cor.quickGetValue(r, 0);
long newMaxIndex = (long)newWithCor.quickGetValue(r, 0);
double newMaxValue = newWithCor.quickGetValue(r, 1);
double update = aggOp.increOp.fn.execute(newMaxValue, currMaxValue);
if (2.0 == update) {
// Return value of 2 ==> both values the same, break ties
// in favor of higher index.
long curMaxIndex = (long) quickGetValue(r,0);
quickSetValue(r, 0, Math.max(curMaxIndex, newMaxIndex));
} else if(1.0 == update){
// Return value of 1 ==> new value is better; use its index
quickSetValue(r, 0, newMaxIndex);
cor.quickSetValue(r, 0, newMaxValue);
} else {
// Other return value ==> current answer is best
}
}
// *** END HACK ***
}else{
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.quickGetValue(r, c);
buffer._correction=cor.quickGetValue(r, 0);;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c), newWithCor.quickGetValue(r, c+1));
quickSetValue(r, c, buffer._sum);
cor.quickSetValue(r, 0, buffer._correction);
}
}
}
else if(aggOp.correctionLocation==CorrectionLocationType.NONE)
{
//e.g., ak+ kahan plus as used in sum, mapmult, mmcj and tsmm
if(aggOp.increOp.fn instanceof KahanPlus)
{
LibMatrixAgg.aggregateBinaryMatrix(newWithCor, this, cor);
}
else
{
if( newWithCor.isInSparseFormat() && aggOp.sparseSafe ) //SPARSE
{
SparseRow[] bRows = newWithCor.getSparseRows();
if( bRows==null ) //early abort on empty block
return;
for( int r=0; r<Math.min(rlen, bRows.length); r++ )
{
SparseRow brow = bRows[r];
if( brow != null && !brow.isEmpty() )
{
int blen = brow.size();
int[] bix = brow.getIndexContainer();
double[] bvals = brow.getValueContainer();
for( int j=0; j<blen; j++)
{
int c = bix[j];
buffer._sum = this.quickGetValue(r, c);
buffer._correction = cor.quickGetValue(r, c);
buffer = (KahanObject) aggOp.increOp.fn.execute(buffer, bvals[j]);
quickSetValue(r, c, buffer._sum);
cor.quickSetValue(r, c, buffer._correction);
}
}
}
}
else //DENSE or SPARSE (!sparsesafe)
{
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.quickGetValue(r, c);
buffer._correction=cor.quickGetValue(r, c);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c));
quickSetValue(r, c, buffer._sum);
cor.quickSetValue(r, c, buffer._correction);
}
}
//change representation if required
//(note since ak+ on blocks is currently only applied in MR, hence no need to account for this in mem estimates)
examSparsity();
}
}
else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOROWS)
{
double n, n2, mu2;
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.quickGetValue(r, c);
n=cor.quickGetValue(0, c);
buffer._correction=cor.quickGetValue(1, c);
mu2=newWithCor.quickGetValue(r, c);
n2=newWithCor.quickGetValue(r+1, c);
n=n+n2;
double toadd=(mu2-buffer._sum)*n2/n;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
quickSetValue(r, c, buffer._sum);
cor.quickSetValue(0, c, n);
cor.quickSetValue(1, c, buffer._correction);
}
}else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS)
{
double n, n2, mu2;
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.quickGetValue(r, c);
n=cor.quickGetValue(r, 0);
buffer._correction=cor.quickGetValue(r, 1);
mu2=newWithCor.quickGetValue(r, c);
n2=newWithCor.quickGetValue(r, c+1);
n=n+n2;
double toadd=(mu2-buffer._sum)*n2/n;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
quickSetValue(r, c, buffer._sum);
cor.quickSetValue(r, 0, n);
cor.quickSetValue(r, 1, buffer._correction);
}
}
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}
/**
 * Incrementally merges a partial aggregate block into this block, where the
 * partial aggregate carries its correction terms inline (in the last
 * row(s)/column(s), as indicated by the operator's correction location).
 * Used to combine partial aggregates computed over different blocks.
 * 
 * @param aggOp aggregate operator; determines the increment function and
 *              where the correction terms are stored
 * @param newWithCorrection partial aggregate to merge, including corrections
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
public void incrementalAggregate(AggregateOperator aggOp, MatrixValue newWithCorrection)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	//assert(aggOp.correctionExists);
	MatrixBlock newWithCor=checkType(newWithCorrection);
	KahanObject buffer=new KahanObject(0, 0);
	
	if(aggOp.correctionLocation==CorrectionLocationType.LASTROW)
	{
		if( aggOp.increOp.fn instanceof KahanPlus )
		{
			//fast path: specialized kahan+ aggregation kernel
			LibMatrixAgg.aggregateBinaryMatrix(newWithCor, this, aggOp);
		}
		else
		{
			//generic case: the last row holds the per-column correction terms
			for(int r=0; r<rlen-1; r++)
				for(int c=0; c<clen; c++)
				{
					buffer._sum=this.quickGetValue(r, c);
					buffer._correction=this.quickGetValue(r+1, c);
					buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c), 
							newWithCor.quickGetValue(r+1, c));
					quickSetValue(r, c, buffer._sum);
					quickSetValue(r+1, c, buffer._correction);
				}
		}
	}
	else if(aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
	{
		if(aggOp.increOp.fn instanceof Builtin 
			&& ( ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MAXINDEX
				|| ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MININDEX)
			){
			// *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
			// rowIndexMax() and its siblings don't fit very well into the standard
			// aggregate framework. We (ab)use the "correction factor" argument to
			// hold the maximum value in each row/column.
			
			// The execute() method for this aggregate takes as its argument
			// two candidates for the highest value. Bookkeeping about
			// indexes (return column/row index with highest value, breaking
			// ties in favor of higher indexes) is handled in this function.
			// Note that both versions of incrementalAggregate() contain
			// very similar blocks of special-case code. If one block is
			// modified, the other needs to be changed to match.
			for(int r = 0; r < rlen; r++){
				//column 0 holds the index, column 1 the running max/min value
				double currMaxValue = quickGetValue(r, 1);
				long newMaxIndex = (long)newWithCor.quickGetValue(r, 0);
				double newMaxValue = newWithCor.quickGetValue(r, 1);
				double update = aggOp.increOp.fn.execute(newMaxValue, currMaxValue);
				
				if (2.0 == update) {
					// Return value of 2 ==> both values the same, break ties
					// in favor of higher index.
					long curMaxIndex = (long) quickGetValue(r,0);
					quickSetValue(r, 0, Math.max(curMaxIndex, newMaxIndex));
				} else if(1.0 == update){
					// Return value of 1 ==> new value is better; use its index
					quickSetValue(r, 0, newMaxIndex);
					quickSetValue(r, 1, newMaxValue);
				} else {
					// Other return value ==> current answer is best
				}
			}
			// *** END HACK ***
		}
		else
		{
			if(aggOp.increOp.fn instanceof KahanPlus)
			{
				//fast path: specialized kahan+ aggregation kernel
				LibMatrixAgg.aggregateBinaryMatrix(newWithCor, this, aggOp);
			}
			else
			{
				//generic case: the last column holds the per-row correction terms
				for(int r=0; r<rlen; r++)
					for(int c=0; c<clen-1; c++)
					{
						buffer._sum=this.quickGetValue(r, c);
						buffer._correction=this.quickGetValue(r, c+1);
						buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c), newWithCor.quickGetValue(r, c+1));
						quickSetValue(r, c, buffer._sum);
						quickSetValue(r, c+1, buffer._correction);
					}
			}
		}
	}/*else if(aggOp.correctionLocation==0)
	{
		for(int r=0; r<rlen; r++)
			for(int c=0; c<clen; c++)
			{
				//buffer._sum=this.getValue(r, c);
				//buffer._correction=0;
				//buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c));
				setValue(r, c, this.getValue(r, c)+newWithCor.getValue(r, c));
			}
	}*/else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOROWS)
	{
		//incremental update with two correction terms per column: row r+1
		//holds the running count n, row r+2 the kahan correction
		double n, n2, mu2;
		for(int r=0; r<rlen-2; r++)
			for(int c=0; c<clen; c++)
			{
				buffer._sum=this.quickGetValue(r, c);
				n=this.quickGetValue(r+1, c);
				buffer._correction=this.quickGetValue(r+2, c);
				mu2=newWithCor.quickGetValue(r, c);
				n2=newWithCor.quickGetValue(r+1, c);
				n=n+n2;
				//incremental mean update: delta weighted by the merged count
				double toadd=(mu2-buffer._sum)*n2/n;
				buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
				quickSetValue(r, c, buffer._sum);
				quickSetValue(r+1, c, n);
				quickSetValue(r+2, c, buffer._correction);
			}
	}else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS)
	{
		//same scheme as LASTTWOROWS, but count/correction are stored in the
		//last two columns of each row
		double n, n2, mu2;
		for(int r=0; r<rlen; r++)
			for(int c=0; c<clen-2; c++)
			{
				buffer._sum=this.quickGetValue(r, c);
				n=this.quickGetValue(r, c+1);
				buffer._correction=this.quickGetValue(r, c+2);
				mu2=newWithCor.quickGetValue(r, c);
				n2=newWithCor.quickGetValue(r, c+1);
				n=n+n2;
				double toadd=(mu2-buffer._sum)*n2/n;
				buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
				quickSetValue(r, c, buffer._sum);
				quickSetValue(r, c+1, n);
				quickSetValue(r, c+2, buffer._correction);
			}
	}
	else
		throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}
/**
 * Applies an index reorganization operation (transpose r', rdiag, or rsort)
 * to this matrix and returns the reorganized result. Only SwapIndex,
 * DiagIndex, and SortIndex functions are supported; anything else fails fast.
 * The startRow/startColumn/length parameters are part of the inherited
 * interface and are not used here.
 */
@Override
public MatrixValue reorgOperations(ReorgOperator op, MatrixValue ret, int startRow, int startColumn, int length)
	throws DMLUnsupportedOperationException, DMLRuntimeException 
{
	if ( !( op.fn instanceof SwapIndex || op.fn instanceof DiagIndex || op.fn instanceof SortIndex) )
		throw new DMLRuntimeException("the current reorgOperations cannot support: "+op.fn.getClass()+".");
	
	MatrixBlock result=checkType(ret);
	CellIndex tempCellIndex = new CellIndex(-1,-1);
	//compute the output dimensions of the reorg operation
	boolean reducedDim=op.fn.computeDimension(rlen, clen, tempCellIndex);
	//choose output representation: dense if dimensions were reduced,
	//sparse for rdiag (diagonal output), otherwise estimate from this block
	boolean sps;
	if(reducedDim)
		sps = false;
	else if(op.fn.equals(DiagIndex.getDiagIndexFnObject()))
		sps = true;
	else
		sps = this.evalSparseFormatInMemory(true);
	
	if(result==null)
		result=new MatrixBlock(tempCellIndex.row, tempCellIndex.column, sps, this.nonZeros);
	else
		result.reset(tempCellIndex.row, tempCellIndex.column, sps, this.nonZeros);
	
	if( LibMatrixReorg.isSupportedReorgOperator(op) )
	{
		//SPECIAL case (operators with special performance requirements,
		//or size-dependent special behavior)
		//currently supported opcodes: r', rdiag, rsort
		LibMatrixReorg.reorg(this, result, op);
	}
	else
	{
		//GENERIC case (any reorg operator)
		CellIndex temp = new CellIndex(0, 0);
		if(sparse)
		{
			if(sparseRows!=null)
			{
				for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
				{
					if(sparseRows[r]==null)
						continue;
					int[] cols=sparseRows[r].getIndexContainer();
					double[] values=sparseRows[r].getValueContainer();
					for(int i=0; i<sparseRows[r].size(); i++)
					{
						//map each input cell index to its output position
						tempCellIndex.set(r, cols[i]);
						op.fn.execute(tempCellIndex, temp);
						result.appendValue(temp.row, temp.column, values[i]);
					}
				}
			}
		}
		else
		{
			if( denseBlock != null )
			{
				if( result.isInSparseFormat() ) //SPARSE<-DENSE
				{
					double[] a = denseBlock;
					for( int i=0, aix=0; i<rlen; i++ )
						for( int j=0; j<clen; j++, aix++ )
						{
							temp.set(i, j);
							op.fn.execute(temp, temp);
							result.appendValue(temp.row, temp.column, a[aix]);
						}
				}
				else //DENSE<-DENSE
				{
					result.allocateDenseBlock();
					Arrays.fill(result.denseBlock, 0);
					double[] a = denseBlock;
					double[] c = result.denseBlock;
					int n = result.clen;
					for( int i=0, aix=0; i<rlen; i++ )
						for( int j=0; j<clen; j++, aix++ )
						{
							temp.set(i, j);
							op.fn.execute(temp, temp);
							c[temp.row*n+temp.column] = a[aix];
						}
					//nnz unchanged by a pure index reorganization
					result.nonZeros = nonZeros;
				}
			}
		}
	}
	
	return result;
}
/**
 * Horizontally appends matrix <code>that</code> to the right of this matrix
 * and returns the combined block; <code>ret</code> is reused as the output
 * if provided.
 * 
 * @param that matrix to append on the right
 * @param ret optional pre-allocated output block (may be null)
 * @return block of size rlen x (clen + that.clen)
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
public MatrixBlock appendOperations( MatrixBlock that, MatrixBlock ret ) 	
	throws DMLUnsupportedOperationException, DMLRuntimeException 
{
	MatrixBlock out = checkType( ret );
	
	//output dimensions and worst-case number of non-zeros
	final int rows = rlen;
	final int cols = clen + that.clen;
	final long totalNnz = nonZeros + that.nonZeros;
	boolean sparseOut = evalSparseFormatInMemory(rows, cols, totalNnz);
	
	//allocate or reuse the output block
	if( out == null )
		out = new MatrixBlock(rows, cols, sparseOut, totalNnz);
	else
		out.reset(rows, cols, sparseOut, totalNnz);
	
	//core append: this block occupies columns [0,clen-1], that block
	//occupies columns [clen,cols-1]
	if( out.sparse ) //SPARSE
	{
		//allocate sparse rows only if at least one input is non-empty
		if( !this.isEmptyBlock(false) || !that.isEmptyBlock(false) )
			out.allocateSparseRowsBlock();
		out.appendToSparse(this, 0, 0);
		out.appendToSparse(that, 0, clen);
	}
	else //DENSE
	{
		out.copy(0, rows-1, 0, clen-1, this, false);
		out.copy(0, rows-1, clen, cols-1, that, false);
	}
	out.nonZeros = totalNnz;
	
	return out;
}
/**
 * Transpose-self matrix multiplication: delegates to the specialized
 * library kernel for either t(X)%*%X (LEFT) or X%*%t(X) (RIGHT).
 * 
 * @param out pre-allocated output block
 * @param tstype transpose type, must be LEFT or RIGHT
 * @return the output block
 * @throws DMLRuntimeException if an unsupported transpose type is passed
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue transposeSelfMatrixMultOperations( MatrixBlock out, MMTSJType tstype ) 	
	throws DMLRuntimeException, DMLUnsupportedOperationException 
{
	//guard against unsupported transpose types
	if( tstype != MMTSJType.LEFT && tstype != MMTSJType.RIGHT )
		throw new DMLRuntimeException("Invalid MMTSJ type '"+tstype.toString()+"'.");
	
	//delegate to the specialized kernel (LEFT selects t(X)%*%X)
	LibMatrixMult.matrixMultTransposeSelf(this, out, tstype == MMTSJType.LEFT);
	
	return out;
}
/**
 * Single-threaded matrix multiplication chain operation; convenience
 * overload that forwards to the parallel variant with k=1.
 * 
 * @param v input column vector
 * @param w weight column vector (may be null, depending on chain type)
 * @param out optional pre-allocated output block
 * @param ctype chain type
 * @return the result block
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue chainMatrixMultOperations( MatrixBlock v, MatrixBlock w, MatrixBlock out, ChainType ctype ) 	
	throws DMLRuntimeException, DMLUnsupportedOperationException 
{
	//forward to the parallel variant with a single thread
	return chainMatrixMultOperations(v, w, out, ctype, 1);
}
/**
 * Matrix multiplication chain operation over this matrix X:
 * t(X) %*% (X %*% v) for XtXv, or t(X) %*% (w * (X %*% v)) for XtwXv.
 * 
 * @param v required input column vector (ncol(X) x 1)
 * @param w weight column vector; may be null (only used for XtwXv)
 * @param out optional pre-allocated output block (clen x 1)
 * @param ctype chain type, must be XtXv or XtwXv
 * @param k degree of parallelism; k>1 selects the multi-threaded kernel
 * @return the result block (clen x 1, dense)
 * @throws DMLRuntimeException if the chain type is invalid, v is null,
 *         or the dimensions of v/w do not match
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue chainMatrixMultOperations( MatrixBlock v, MatrixBlock w, MatrixBlock out, ChainType ctype, int k ) 	
	throws DMLRuntimeException, DMLUnsupportedOperationException 
{
	//check for transpose type
	if( !(ctype == ChainType.XtXv || ctype == ChainType.XtwXv) )
		throw new DMLRuntimeException("Invalid mmchain type '"+ctype.toString()+"'.");
	
	//check v for null BEFORE dereferencing it (the previous code accessed
	//v.getNumRows() ahead of its null check, risking a NullPointerException)
	if( v == null )
		throw new DMLRuntimeException("Invalid input vector (null)");
	
	//check for matching dimensions
	if( this.getNumColumns() != v.getNumRows() )
		throw new DMLRuntimeException("Dimensions mismatch on mmchain operation ("+this.getNumColumns()+" != "+v.getNumRows()+")");
	if( v.getNumColumns() != 1 )
		throw new DMLRuntimeException("Invalid input vector (column vector expected, but ncol="+v.getNumColumns()+")");
	if( w!=null && w.getNumColumns() != 1 )
		throw new DMLRuntimeException("Invalid weight vector (column vector expected, but ncol="+w.getNumColumns()+")");
	
	//prepare result
	if( out != null )
		out.reset(clen, 1, false);
	else 
		out = new MatrixBlock(clen, 1, false);
	
	//compute matrix mult
	if( k > 1 )
		LibMatrixMult.matrixMultChain(this, v, w, out, ctype, k);
	else
		LibMatrixMult.matrixMultChain(this, v, w, out, ctype);
	
	return out;
}
/**
 * Permutation matrix multiplication, producing two output blocks via the
 * specialized library kernel.
 * 
 * @param m2Val right-hand side matrix (must have the same number of rows)
 * @param out1Val first output block
 * @param out2Val second output block
 * @throws DMLRuntimeException if the row dimensions do not match
 * @throws DMLUnsupportedOperationException
 */
public void permutationMatrixMultOperations( MatrixValue m2Val, MatrixValue out1Val, MatrixValue out2Val ) 	
	throws DMLRuntimeException, DMLUnsupportedOperationException 
{
	//check input types and dimensions
	MatrixBlock m2 = checkType(m2Val);
	MatrixBlock ret1 = checkType(out1Val);
	MatrixBlock ret2 = checkType(out2Val);
	
	//throw the declared DMLRuntimeException instead of a raw RuntimeException,
	//for consistency with all other dimension checks in this class
	if(this.rlen!=m2.rlen)
		throw new DMLRuntimeException("Dimensions do not match for permutation matrix multiplication ("+this.rlen+"!="+m2.rlen+").");

	//compute permutation matrix multiplication
	LibMatrixMult.matrixMultPermute(this, m2, ret1, ret2);
}
/**
 * Method to perform leftIndexing operation for a given lower and upper bounds in row and column dimensions.
 * Updated matrix is returned as the output.
 * 
 * Operations to be performed: 
 *   1) result=this; 
 *   2) result[rowLower:rowUpper, colLower:colUpper] = rhsMatrix;
 * 
 * @param rhsMatrix source matrix copied into the target range
 * @param rowLower row lower bound, 1-based inclusive
 * @param rowUpper row upper bound, 1-based inclusive
 * @param colLower column lower bound, 1-based inclusive
 * @param colUpper column upper bound, 1-based inclusive
 * @param ret optional pre-allocated output block (ignored if inplace)
 * @param inplace if true, this block is updated directly instead of copied
 * @throws DMLRuntimeException if bounds are invalid or do not fit rhsMatrix
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue leftIndexingOperations(MatrixValue rhsMatrix, long rowLower, long rowUpper, 
		long colLower, long colUpper, MatrixValue ret, boolean inplace) 
	throws DMLRuntimeException, DMLUnsupportedOperationException 
{	
	// Check the validity of bounds
	if ( rowLower < 1 || rowLower > getNumRows() || rowUpper < rowLower || rowUpper > getNumRows()
			|| colLower < 1 || colUpper > getNumColumns() || colUpper < colLower || colUpper > getNumColumns() ) {
		throw new DMLRuntimeException("Invalid values for matrix indexing: " +
				"["+rowLower+":"+rowUpper+"," + colLower+":"+colUpper+"] " +
						"must be within matrix dimensions ["+getNumRows()+","+getNumColumns()+"].");
	}
	
	if ( (rowUpper-rowLower+1) < rhsMatrix.getNumRows() || (colUpper-colLower+1) < rhsMatrix.getNumColumns()) {
		throw new DMLRuntimeException("Invalid values for matrix indexing: " +
				"dimensions of the source matrix ["+rhsMatrix.getNumRows()+"x" + rhsMatrix.getNumColumns() + "] " +
				"do not match the shape of the matrix specified by indices [" +
				rowLower +":" + rowUpper + ", " + colLower + ":" + colUpper + "].");
	}
	MatrixBlock result=checkType(ret);
	//estimate output sparsity for allocating the result representation
	boolean sp = estimateSparsityOnLeftIndexing(rlen, clen, nonZeros, 
			rhsMatrix.getNumRows(), rhsMatrix.getNumColumns(), rhsMatrix.getNonZeros());
	
	if( !inplace ) //general case
	{
		if(result==null)
			result=new MatrixBlock(rlen, clen, sp);
		else
			result.reset(rlen, clen, sp);
		result.copy(this, sp);
	}
	else //update in-place
		result = this;
	
	//NOTE conceptually we could directly use a zeroout and copy(..., false) but	
	//     since this was factors slower, we still use a full copy and subsequently
	//     copy(..., true) - however, this can be changed in the future once we 
	//     improved the performance of zeroout.
	//result = (MatrixBlockDSM) zeroOutOperations(result, new IndexRange(rowLower,rowUpper, colLower, colUpper ), false);
	
	//convert 1-based inclusive bounds to 0-based indexes
	int rl = (int)rowLower-1;
	int ru = (int)rowUpper-1;
	int cl = (int)colLower-1;
	int cu = (int)colUpper-1;
	MatrixBlock src = (MatrixBlock)rhsMatrix;
	

	if(rl==ru && cl==cu) //specific case: cell update			
	{
		//copy single value and update nnz
		result.quickSetValue(rl, cl, src.quickGetValue(0, 0));
	}		
	else //general case
	{
		//copy submatrix into result
		result.copy(rl, ru, cl, cu, src, true);
	}

	return result;
}
/**
 * Explicitly allow left indexing for scalars.
 * 
 * Operations to be performed: 
 *   1) result=this; 
 *   2) result[row,column] = scalar.getDoubleValue();
 * 
 * @param scalar scalar whose double value is written into the target cell
 * @param row target row, 1-based
 * @param col target column, 1-based
 * @param ret optional pre-allocated output block (ignored if inplace)
 * @param inplace if true, this block is updated directly instead of copied
 * @return the updated block
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue leftIndexingOperations(ScalarObject scalar, long row, long col, MatrixValue ret, boolean inplace) 
	throws DMLRuntimeException, DMLUnsupportedOperationException 
{
	MatrixBlock out = checkType(ret);
	double value = scalar.getDoubleValue();
	boolean sparseOut = estimateSparsityOnLeftIndexing(rlen, clen, nonZeros, 1, 1, (value!=0)?1:0);
	
	if( inplace ) //update in-place
	{
		out = this;
	}
	else //general case: materialize a copy of this block
	{
		if( out == null )
			out = new MatrixBlock(rlen, clen, sparseOut);
		else
			out.reset(rlen, clen, sparseOut);
		out.copy(this, sparseOut);
	}
	
	//write the scalar into the 0-based target cell
	out.quickSetValue((int)row-1, (int)col-1, value);
	return out;
}
/**
 * Method to perform rangeReIndex operation for a given lower and upper bounds in row and column dimensions.
 * Extracted submatrix is returned as "result". All bounds are 1-based and inclusive.
 * @throws DMLRuntimeException if the bounds are outside the matrix dimensions
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue sliceOperations(long rowLower, long rowUpper, long colLower, long colUpper, MatrixValue ret) 
	throws DMLRuntimeException, DMLUnsupportedOperationException {
	
	// check the validity of bounds
	if ( rowLower < 1 || rowLower > getNumRows() || rowUpper < rowLower || rowUpper > getNumRows()
			|| colLower < 1 || colUpper > getNumColumns() || colUpper < colLower || colUpper > getNumColumns() ) {
		throw new DMLRuntimeException("Invalid values for matrix indexing: " +
				"["+rowLower+":"+rowUpper+"," + colLower+":"+colUpper+"] " +
						"must be within matrix dimensions ["+getNumRows()+","+getNumColumns()+"]");
	}
	
	//convert 1-based inclusive bounds to 0-based indexes
	int rl = (int)rowLower-1;
	int ru = (int)rowUpper-1;
	int cl = (int)colLower-1;
	int cu = (int)colUpper-1;
	//System.out.println("  -- performing slide on [" + getNumRows() + "x" + getNumColumns() + "] with ["+rl+":"+ru+","+cl+":"+cu+"].");
	// Output matrix will have the same sparsity as that of the input matrix.
	// (assuming a uniform distribution of non-zeros in the input)
	MatrixBlock result=checkType(ret);
	long estnnz= (long) ((double)this.nonZeros/rlen/clen*(ru-rl+1)*(cu-cl+1));
	boolean result_sparsity = this.sparse && MatrixBlock.evalSparseFormatInMemory(ru-rl+1, cu-cl+1, estnnz);
	if(result==null)
		result=new MatrixBlock(ru-rl+1, cu-cl+1, result_sparsity, estnnz);
	else
		result.reset(ru-rl+1, cu-cl+1, result_sparsity, estnnz);
	
	// actual slice operation
	if( rowLower==1 && rowUpper==rlen && colLower==1 && colUpper==clen ) {
		// copy if entire matrix required
		result.copy( this );
	}
	else //general case
	{
		//core slicing operation (nnz maintained internally)
		if (sparse) 
			sliceSparse(rl, ru, cl, cu, result);
		else 
			sliceDense(rl, ru, cl, cu, result);
	}
	
	return result;
}
/**
 * Extracts the 0-based inclusive index range [rl,ru] x [cl,cu] from this
 * sparse block into dest. Contains fast paths for a single column (dense
 * output vector) and for a full single row (shared sparse row append).
 * 
 * @param rl row lower bound, 0-based inclusive
 * @param ru row upper bound, 0-based inclusive
 * @param cl column lower bound, 0-based inclusive
 * @param cu column upper bound, 0-based inclusive
 * @param dest output block; its nnz is maintained here or via appendValue
 * @throws DMLRuntimeException
 */
private void sliceSparse(int rl, int ru, int cl, int cu, MatrixBlock dest) 
	throws DMLRuntimeException
{
	//check for early abort
	if( isEmptyBlock(false) ) 
		return;
	
	if( cl==cu ) //COLUMN VECTOR 
	{
		//note: always dense dest
		dest.allocateDenseBlock();
		for( int i=rl; i<=ru; i++ ) {
			SparseRow arow = sparseRows[i];
			if( arow != null && !arow.isEmpty() ) {
				double val = arow.get(cl);
				if( val != 0 ) {
					dest.denseBlock[i-rl] = val;
					dest.nonZeros++;
				}
			}
		}
	}
	else if( rl==ru && cl==0 && cu==clen-1 ) //ROW VECTOR 
	{
		//note: always sparse dest, but also works for dense
		dest.appendRow(0, sparseRows[rl]);
	}
	else //general case (sparse/dense dest)
	{
		for(int i=rl; i <= ru; i++) 
			if(sparseRows[i] != null && !sparseRows[i].isEmpty()) 
			{
				SparseRow arow = sparseRows[i];
				int alen = arow.size();
				int[] aix = arow.getIndexContainer();
				double[] avals = arow.getValueContainer();
				//find the first entry with column index >= cl (-1 if none)
				int astart = (cl>0)?arow.searchIndexesFirstGTE(cl):0;
				if( astart != -1 )
					//copy entries until the column range is exceeded,
					//shifting indexes to the output coordinate system
					for( int j=astart; j<alen && aix[j] <= cu; j++ )
						dest.appendValue(i-rl, aix[j]-cl, avals[j]);
			}
	}
}
/**
 * Extracts the 0-based inclusive index range [rl,ru] x [cl,cu] from this
 * dense block into dest (dense output). Recomputes dest's nnz at the end
 * because the copy loops do not track it.
 * 
 * @param rl row lower bound, 0-based inclusive
 * @param ru row upper bound, 0-based inclusive
 * @param cl column lower bound, 0-based inclusive
 * @param cu column upper bound, 0-based inclusive
 * @param dest output block
 * @throws DMLRuntimeException
 */
private void sliceDense(int rl, int ru, int cl, int cu, MatrixBlock dest) 
	throws DMLRuntimeException
{
	//ensure allocated input/output blocks
	if( denseBlock == null )
		return;
	dest.allocateDenseBlock();

	//indexing operation
	if( cl==cu ) //COLUMN INDEXING
	{
		if( clen==1 ) //vector -> vector
		{
			System.arraycopy(denseBlock, rl, dest.denseBlock, 0, ru-rl+1);
		}
		else //matrix -> vector
		{
			//IBM JVM bug (JDK7) causes crash for certain cl/cu values (e.g., divide by zero for 4) 
			//for( int i=rl*clen+cl, ix=0; i<=ru*clen+cu; i+=clen, ix++ )
			//	dest.denseBlock[ix] = denseBlock[i];
			//NOTE: the local copy of clen is the workaround - keep it
			int len = clen;
			for( int i=rl*len+cl, ix=0; i<=ru*len+cu; i+=len, ix++ )
				dest.denseBlock[ix] = denseBlock[i];
		}
	}
	else // GENERAL RANGE INDEXING
	{
		//IBM JVM bug (JDK7) causes crash for certain cl/cu values (e.g., divide by zero for 4) 
		//for(int i = rl, ix1 = rl*clen+cl, ix2=0; i <= ru; i++, ix1+=clen, ix2+=dest.clen) 
		//	System.arraycopy(denseBlock, ix1, dest.denseBlock, ix2, dest.clen);
		//NOTE: the local copies of clen/dest.clen are the workaround - keep them
		int len1 = clen;
		int len2 = dest.clen;
		for(int i = rl, ix1 = rl*len1+cl, ix2=0; i <= ru; i++, ix1+=len1, ix2+=len2) 
			System.arraycopy(denseBlock, ix1, dest.denseBlock, ix2, len2);
	}
	
	//compute nnz of output (not maintained due to native calls)
	dest.recomputeNonZeros();
}
/**
 * Slices this block according to the given index range and distributes the
 * pieces to up to four output blocks (top-left, top-right, bottom-left,
 * bottom-right), split at the given row and column cut points.
 * 
 * NOTE(review): outlist appears to be expected to contain, in iteration
 * order, exactly one block per quadrant that the range intersects - verify
 * against callers.
 */
public void sliceOperations(ArrayList<IndexedMatrixValue> outlist, IndexRange range, int rowCut, int colCut, 
		int normalBlockRowFactor, int normalBlockColFactor, int boundaryRlen, int boundaryClen)
{
	MatrixBlock topleft=null, topright=null, bottomleft=null, bottomright=null;
	Iterator<IndexedMatrixValue> p=outlist.iterator();
	//use boundary block sizes if the cut lies beyond the range
	int blockRowFactor=normalBlockRowFactor, blockColFactor=normalBlockColFactor;
	if(rowCut>range.rowEnd)
		blockRowFactor=boundaryRlen;
	if(colCut>range.colEnd)
		blockColFactor=boundaryClen;
	
	int minrowcut=(int)Math.min(rowCut,range.rowEnd);
	int mincolcut=(int)Math.min(colCut, range.colEnd);
	int maxrowcut=(int)Math.max(rowCut, range.rowStart);
	int maxcolcut=(int)Math.max(colCut, range.colStart);
	
	//reset the output block of each quadrant intersected by the range,
	//with a sparsity estimate proportional to the covered sub-area
	if(range.rowStart<rowCut && range.colStart<colCut)
	{
		topleft=(MatrixBlock) p.next().getValue();
		//topleft.reset(blockRowFactor, blockColFactor, 
		//		checkSparcityOnSlide(rowCut-(int)range.rowStart, colCut-(int)range.colStart, blockRowFactor, blockColFactor));
		
		topleft.reset(blockRowFactor, blockColFactor, 
				estimateSparsityOnSlice(minrowcut-(int)range.rowStart, mincolcut-(int)range.colStart, blockRowFactor, blockColFactor));
	}
	if(range.rowStart<rowCut && range.colEnd>=colCut)
	{
		topright=(MatrixBlock) p.next().getValue();
		topright.reset(blockRowFactor, boundaryClen, 
				estimateSparsityOnSlice(minrowcut-(int)range.rowStart, (int)range.colEnd-maxcolcut+1, blockRowFactor, boundaryClen));
	}
	if(range.rowEnd>=rowCut && range.colStart<colCut)
	{
		bottomleft=(MatrixBlock) p.next().getValue();
		bottomleft.reset(boundaryRlen, blockColFactor, 
				estimateSparsityOnSlice((int)range.rowEnd-maxrowcut+1, mincolcut-(int)range.colStart, boundaryRlen, blockColFactor));
	}
	if(range.rowEnd>=rowCut && range.colEnd>=colCut)
	{
		bottomright=(MatrixBlock) p.next().getValue();
		bottomright.reset(boundaryRlen, boundaryClen, 
				estimateSparsityOnSlice((int)range.rowEnd-maxrowcut+1, (int)range.colEnd-maxcolcut+1, boundaryRlen, boundaryClen));
	}
	
	if(sparse)
	{
		if(sparseRows!=null)
		{
			//rows above the cut go to the top blocks, the rest to the bottom
			int r=(int)range.rowStart;
			for(; r<Math.min(Math.min(rowCut, sparseRows.length), range.rowEnd+1); r++)
				sliceHelp(r, range, colCut, topleft, topright, normalBlockRowFactor-rowCut, normalBlockRowFactor, normalBlockColFactor);
			
			for(; r<=Math.min(range.rowEnd, sparseRows.length-1); r++)
				sliceHelp(r, range, colCut, bottomleft, bottomright, -rowCut, normalBlockRowFactor, normalBlockColFactor);
			//System.out.println("in: \n"+this);
			//System.out.println("outlist: \n"+outlist);
		}
	}else
	{
		if(denseBlock!=null)
		{
			//i tracks the linearized offset of the current row
			int i=((int)range.rowStart)*clen;
			int r=(int) range.rowStart;
			for(; r<Math.min(rowCut, range.rowEnd+1); r++)
			{
				int c=(int) range.colStart;
				for(; c<Math.min(colCut, range.colEnd+1); c++)
					topleft.appendValue(r+normalBlockRowFactor-rowCut, c+normalBlockColFactor-colCut, denseBlock[i+c]);
				for(; c<=range.colEnd; c++)
					topright.appendValue(r+normalBlockRowFactor-rowCut, c-colCut, denseBlock[i+c]);
				i+=clen;
			}
			
			for(; r<=range.rowEnd; r++)
			{
				int c=(int) range.colStart;
				for(; c<Math.min(colCut, range.colEnd+1); c++)
					bottomleft.appendValue(r-rowCut, c+normalBlockColFactor-colCut, denseBlock[i+c]);
				for(; c<=range.colEnd; c++)
					bottomright.appendValue(r-rowCut, c-colCut, denseBlock[i+c]);
				i+=clen;
			}
		}
	}
}
/**
 * Distributes the non-zeros of sparse row r (restricted to the requested
 * column range) to the left/right output blocks, split at column colCut.
 * Column indexes below colCut go left (shifted into the left block's
 * coordinates), the rest go right.
 */
private void sliceHelp(int r, IndexRange range, int colCut, MatrixBlock left, MatrixBlock right, int rowOffset, int normalBlockRowFactor, int normalBlockColFactor)
{
	if(sparseRows[r]==null) 
		return;
	
	int[] cols=sparseRows[r].getIndexContainer();
	double[] values=sparseRows[r].getValueContainer();
	//restrict to [colStart,colEnd]; early abort if the row has no entry there
	int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
	if(start<0) 
		return;
	int end=sparseRows[r].searchIndexesFirstLTE((int)range.colEnd);
	if(end<0 || start>end) 
		return;
	
	//actual slice operation
	for(int i=start; i<=end; i++) {
		if(cols[i]<colCut)
			left.appendValue(r+rowOffset, cols[i]+normalBlockColFactor-colCut, values[i]);
		else
			right.appendValue(r+rowOffset, cols[i]-colCut, values[i]);
	}
}
/**
 * Append operation for the MR side: distributes the horizontal concatenation
 * of this block and v2 into the pre-allocated output blocks in outlist,
 * respecting the maximum block width (blockColFactor). If this block is
 * already full, the outputs are plain copies; otherwise the first 'part'
 * columns of v2 fill up this block and the remainder spills into a second
 * output block.
 * nextNCol is the number columns for the block right of block v2.
 */
@Override
public void appendOperations(MatrixValue v2,
		ArrayList<IndexedMatrixValue> outlist, int blockRowFactor,
		int blockColFactor, boolean m2IsLast, int nextNCol)
		throws DMLUnsupportedOperationException, DMLRuntimeException {	
	MatrixBlock m2=(MatrixBlock)v2;
	//System.out.println("second matrix: \n"+m2);
	Iterator<IndexedMatrixValue> p=outlist.iterator();
	if(this.clen==blockColFactor)
	{
		//this block is already at full width: outputs are copies of the inputs
		MatrixBlock first=(MatrixBlock) p.next().getValue();
		first.copy(this);
		MatrixBlock second=(MatrixBlock) p.next().getValue();
		second.copy(m2);
	}else
	{
		//this block has free columns: 'part' columns of m2 fit into the
		//first output block, the rest goes into the second
		int ncol=Math.min(clen+m2.getNumColumns(), blockColFactor);
		int part=ncol-clen;
		MatrixBlock first=(MatrixBlock) p.next().getValue();
		first.reset(rlen, ncol, this.nonZeros+m2.getNonZeros()*part/m2.getNumColumns());
		
		//copy the first matrix
		if(this.sparse)
		{
			if(this.sparseRows!=null)
			{
				for(int i=0; i<Math.min(rlen, this.sparseRows.length); i++)
				{
					if(this.sparseRows[i]!=null)
						first.appendRow(i, this.sparseRows[i]);
				}
			}
		}else if(this.denseBlock!=null)
		{
			int sindx=0;
			for(int r=0; r<rlen; r++)
				for(int c=0; c<clen; c++)
				{
					first.appendValue(r, c, this.denseBlock[sindx]);
					sindx++;
				}
		}
		
		//allocate the second output only if m2 actually spills over
		MatrixBlock second=null;
		if(part<m2.clen)
		{
			second=(MatrixBlock) p.next().getValue();
			if(m2IsLast)
				second.reset(m2.rlen, m2.clen-part, m2.sparse);
			else
				second.reset(m2.rlen, Math.min(m2.clen-part+nextNCol, blockColFactor), m2.sparse);
		}
		
		//copy the second
		if(m2.sparse)
		{
			if(m2.sparseRows!=null)
			{
				for(int i=0; i<Math.min(m2.rlen, m2.sparseRows.length); i++)
				{
					if(m2.sparseRows[i]!=null)
					{
						int[] indexContainer=m2.sparseRows[i].getIndexContainer();
						double[] valueContainer=m2.sparseRows[i].getValueContainer();
						for(int j=0; j<m2.sparseRows[i].size(); j++)
						{
							//split each entry at column 'part'
							if(indexContainer[j]<part)
								first.appendValue(i, clen+indexContainer[j], valueContainer[j]);
							else
								second.appendValue(i, indexContainer[j]-part, valueContainer[j]);
						}
					}
				}
			}
		}else if(m2.denseBlock!=null)
		{
			int sindx=0;
			for(int r=0; r<m2.rlen; r++)
			{
				int c=0;
				for(; c<part; c++)
				{
					first.appendValue(r, clen+c, m2.denseBlock[sindx+c]);
					//System.out.println("access "+(sindx+c));
					//System.out.println("add first ("+r+", "+(clen+c)+"), "+m2.denseBlock[sindx+c]);
				}
				for(; c<m2.clen; c++)
				{
					second.appendValue(r, c-part, m2.denseBlock[sindx+c]);
					//System.out.println("access "+(sindx+c));
					//System.out.println("add second ("+r+", "+(c-part)+"), "+m2.denseBlock[sindx+c]);
				}
				sindx+=m2.clen;
			}
		}
	}
}
/**
 * Zeroes out (complementary=false) or selects (complementary=true) the cells
 * inside the given 0-based index range: with zero-out, everything outside the
 * range is copied and the range itself becomes zero; with selection, only the
 * range is copied and everything else becomes zero.
 * 
 * @param result optional pre-allocated output block
 * @param range index range to zero out resp. select
 * @param complementary true = keep only the range, false = zero the range
 */
public MatrixValue zeroOutOperations(MatrixValue result, IndexRange range, boolean complementary)
		throws DMLUnsupportedOperationException, DMLRuntimeException 
{
	checkType(result);
	//estimate output sparsity from the fraction of cells covered by the range
	double currentSparsity=(double)nonZeros/(double)rlen/(double)clen;
	double estimatedSps=currentSparsity*(double)(range.rowEnd-range.rowStart+1)
			*(double)(range.colEnd-range.colStart+1)/(double)rlen/(double)clen;
	if(!complementary)
		estimatedSps=currentSparsity-estimatedSps;
	
	boolean lsparse = evalSparseFormatInMemory(rlen, clen, (long)(estimatedSps*rlen*clen));
	
	if(result==null)
		result=new MatrixBlock(rlen, clen, lsparse, (int)(estimatedSps*rlen*clen));
	else
		result.reset(rlen, clen, lsparse, (int)(estimatedSps*rlen*clen));
	
	
	if(sparse)
	{
		if(sparseRows!=null)
		{
			if(!complementary)//if zero out
			{
				//rows entirely outside the range are copied unchanged
				for(int r=0; r<Math.min((int)range.rowStart, sparseRows.length); r++)
					((MatrixBlock) result).appendRow(r, sparseRows[r]);
				for(int r=Math.min((int)range.rowEnd+1, sparseRows.length); r<Math.min(rlen, sparseRows.length); r++)
					((MatrixBlock) result).appendRow(r, sparseRows[r]);
			}
			for(int r=(int)range.rowStart; r<=Math.min(range.rowEnd, sparseRows.length-1); r++)
			{
				if(sparseRows[r]==null) 
					continue;
				int[] cols=sparseRows[r].getIndexContainer();
				double[] values=sparseRows[r].getValueContainer();
				
				if(complementary)//if selection
				{
					//keep only the entries with column index in [colStart,colEnd]
					int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
					if(start<0) continue;
					int end=sparseRows[r].searchIndexesFirstGT((int)range.colEnd);
					if(end<0 || start>end) 
						continue;
					
					for(int i=start; i<end; i++)
					{
						((MatrixBlock) result).appendValue(r, cols[i], values[i]);
					}
				}else
				{
					//keep only the entries with column index outside [colStart,colEnd]
					int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
					if(start<0) start=sparseRows[r].size();
					int end=sparseRows[r].searchIndexesFirstGT((int)range.colEnd);
					if(end<0) end=sparseRows[r].size();
					
					for(int i=0; i<start; i++)
					{
						((MatrixBlock) result).appendValue(r, cols[i], values[i]);
					}
					for(int i=end; i<sparseRows[r].size(); i++)
					{
						((MatrixBlock) result).appendValue(r, cols[i], values[i]);
					}
				}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			if(complementary)//if selection
			{
				//copy only the cells inside the range
				int offset=((int)range.rowStart)*clen;
				for(int r=(int) range.rowStart; r<=range.rowEnd; r++)
				{
					for(int c=(int) range.colStart; c<=range.colEnd; c++)
						((MatrixBlock) result).appendValue(r, c, denseBlock[offset+c]);
					offset+=clen;
				}
			}else
			{
				//copy rows above, the out-of-range columns within, and rows below
				int offset=0;
				int r=0;
				for(; r<(int)range.rowStart; r++)
					for(int c=0; c<clen; c++, offset++)
						((MatrixBlock) result).appendValue(r, c, denseBlock[offset]);
				
				for(; r<=(int)range.rowEnd; r++)
				{
					for(int c=0; c<(int)range.colStart; c++)
						((MatrixBlock) result).appendValue(r, c, denseBlock[offset+c]);
					for(int c=(int)range.colEnd+1; c<clen; c++)
						((MatrixBlock) result).appendValue(r, c, denseBlock[offset+c]);
					offset+=clen;
				}
				
				for(; r<rlen; r++)
					for(int c=0; c<clen; c++, offset++)
						((MatrixBlock) result).appendValue(r, c, denseBlock[offset]);
			}
		}
	}
	return result;
}
/**
 * Unary aggregate without dropping correction rows/columns; convenience
 * overload that forwards to the full variant with inCP=false.
 */
public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op, MatrixValue result, 
		int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	//inCP=false keeps the correction data in the output block
	return aggregateUnaryOperations(op, result, blockingFactorRow, blockingFactorCol, indexesIn, false);
}
/**
 * Generic unary aggregate over this block. The output is enlarged to hold
 * correction rows/columns as required by the aggregate operator; those are
 * dropped again if inCP is set.
 * 
 * @param op unary aggregate operator
 * @param result optional pre-allocated output block
 * @param blockingFactorRow row block size (used for global index computation)
 * @param blockingFactorCol column block size (used for global index computation)
 * @param indexesIn block indexes of this block within the overall matrix
 * @param inCP if true, correction rows/columns are dropped from the output
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op, MatrixValue result, 
		int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn, boolean inCP) 
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	CellIndex tempCellIndex = new CellIndex(-1,-1);
	op.indexFn.computeDimension(rlen, clen, tempCellIndex);
	//enlarge the output dimensions to hold the correction terms, if any
	if(op.aggOp.correctionExists)
	{
		switch(op.aggOp.correctionLocation)
		{
			case LASTROW: 
				tempCellIndex.row++; 
				break;
			case LASTCOLUMN: 
				tempCellIndex.column++; 
				break;
			case LASTTWOROWS: 
				tempCellIndex.row+=2; 
				break;
			case LASTTWOCOLUMNS: 
				tempCellIndex.column+=2; 
				break;
			default:
				throw new DMLRuntimeException("unrecognized correctionLocation: "+op.aggOp.correctionLocation);	
		}
	}
	if(result==null)
		result=new MatrixBlock(tempCellIndex.row, tempCellIndex.column, false);
	else
		result.reset(tempCellIndex.row, tempCellIndex.column, false);
	
	MatrixBlock ret = (MatrixBlock) result;
	if( LibMatrixAgg.isSupportedUnaryAggregateOperator(op) ) {
		//fast path: specialized aggregation kernels
		LibMatrixAgg.aggregateUnaryMatrix(this, ret, op);
		LibMatrixAgg.recomputeIndexes(ret, op, blockingFactorRow, blockingFactorCol, indexesIn);
	}
	else if(op.sparseSafe)
		sparseAggregateUnaryHelp(op, ret, blockingFactorRow, blockingFactorCol, indexesIn);
	else
		denseAggregateUnaryHelp(op, ret, blockingFactorRow, blockingFactorCol, indexesIn);
	
	if(op.aggOp.correctionExists && inCP)
		((MatrixBlock)result).dropLastRowsOrColums(op.aggOp.correctionLocation);
	
	return ret;
}
/**
 * Sparse-safe fallback for unary aggregates: visits only the materialized
 * cells (all cells in the dense case) and incrementally aggregates each
 * value into the output cell determined by the operator's index function.
 * Note: despite the name, this handles both sparse and dense representations.
 */
private void sparseAggregateUnaryHelp(AggregateUnaryOperator op, MatrixBlock result, 
		int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLRuntimeException
{
	//initialize result
	if(op.aggOp.initialValue!=0)
		result.resetDenseWithValue(result.rlen, result.clen, op.aggOp.initialValue);
	
	CellIndex tempCellIndex = new CellIndex(-1,-1);
	KahanObject buffer=new KahanObject(0,0);
	int r = 0, c = 0;
	
	if(sparse)
	{
		if(sparseRows!=null)
		{
			for(r=0; r<Math.min(rlen, sparseRows.length); r++)
			{
				if(sparseRows[r]==null) 
					continue;
				int[] cols=sparseRows[r].getIndexContainer();
				double[] values=sparseRows[r].getValueContainer();
				for(int i=0; i<sparseRows[r].size(); i++)
				{
					//map the input cell to its output cell and aggregate
					tempCellIndex.set(r, cols[i]);
					op.indexFn.execute(tempCellIndex, tempCellIndex);
					incrementalAggregateUnaryHelp(op.aggOp, result, tempCellIndex.row, tempCellIndex.column, values[i], buffer);
				}
			}
		}
	}
	else
	{
		if(denseBlock!=null)
		{
			int limit=rlen*clen;
			for(int i=0; i<limit; i++)
			{
				//recover the 2d cell index from the linearized position
				r=i/clen;
				c=i%clen;
				tempCellIndex.set(r, c);
				op.indexFn.execute(tempCellIndex, tempCellIndex);
				incrementalAggregateUnaryHelp(op.aggOp, result, tempCellIndex.row, tempCellIndex.column, denseBlock[i], buffer);
			}
		}
	}
}
/**
 * Sparse-unsafe fallback for unary aggregates: iterates over every cell
 * (including zeros). Contains a dedicated path for MAXINDEX/MININDEX, which
 * track both the extremum value and its global column index.
 *
 * @param op unary aggregate operator
 * @param result pre-allocated output block (dimensions incl. corrections)
 * @param blockingFactorRow block size in the row dimension
 * @param blockingFactorCol block size in the column dimension (used to derive global column indexes)
 * @param indexesIn block indexes of this block within the overall matrix
 * @throws DMLRuntimeException
 */
private void denseAggregateUnaryHelp(AggregateUnaryOperator op, MatrixBlock result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLRuntimeException
{
//initialize result with the operator's initial value
if(op.aggOp.initialValue!=0)
result.resetDenseWithValue(result.rlen, result.clen, op.aggOp.initialValue);
CellIndex tempCellIndex = new CellIndex(-1,-1);
KahanObject buffer=new KahanObject(0,0);
for(int i=0; i<rlen; i++)
for(int j=0; j<clen; j++)
{
tempCellIndex.set(i, j);
op.indexFn.execute(tempCellIndex, tempCellIndex);
//special case rowIndexMax/rowIndexMin: result column 0 holds the
//global column index of the extremum, column 1 the extremum value
if(op.aggOp.correctionExists
&& op.aggOp.correctionLocation == CorrectionLocationType.LASTCOLUMN
&& op.aggOp.increOp.fn instanceof Builtin
&& ( ((Builtin)(op.aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MAXINDEX
|| ((Builtin)(op.aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MININDEX)
){
double currMaxValue = result.quickGetValue(i, 1);
long newMaxIndex = UtilFunctions.cellIndexCalculation(indexesIn.getColumnIndex(), blockingFactorCol, j);
double newMaxValue = quickGetValue(i, j);
//the builtin returns 1 iff the new value replaces the current extremum
double update = op.aggOp.increOp.fn.execute(newMaxValue, currMaxValue);
//System.out.println("currV="+currMaxValue+",newV="+newMaxValue+",newIX="+newMaxIndex+",update="+update);
if(update == 1){
result.quickSetValue(i, 0, newMaxIndex);
result.quickSetValue(i, 1, newMaxValue);
}
}else
incrementalAggregateUnaryHelp(op.aggOp, result, tempCellIndex.row, tempCellIndex.column, quickGetValue(i,j), buffer);
}
}
/**
 * Aggregates a single value into the output cell (row, column), maintaining
 * correction terms (e.g., Kahan compensation) and, for mean-style operators,
 * a running count in the adjacent correction rows/columns.
 *
 * @param aggOp aggregate operator (determines correction layout)
 * @param result output block holding partial aggregates and corrections
 * @param row target row of the aggregate cell
 * @param column target column of the aggregate cell
 * @param newvalue value to fold into the aggregate
 * @param buffer reusable Kahan buffer (avoids per-cell allocation)
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
private void incrementalAggregateUnaryHelp(AggregateOperator aggOp, MatrixBlock result, int row, int column,
double newvalue, KahanObject buffer) throws DMLRuntimeException
{
if(aggOp.correctionExists)
{
//single correction cell (e.g., Kahan sum): correction stored in the
//next row or column after the aggregate cell
if(aggOp.correctionLocation==CorrectionLocationType.LASTROW || aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
{
int corRow=row, corCol=column;
if(aggOp.correctionLocation==CorrectionLocationType.LASTROW)//extra row
corRow++;
else if(aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
corCol++;
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
buffer._sum=result.quickGetValue(row, column);
buffer._correction=result.quickGetValue(corRow, corCol);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newvalue);
result.quickSetValue(row, column, buffer._sum);
result.quickSetValue(corRow, corCol, buffer._correction);
}else if(aggOp.correctionLocation==CorrectionLocationType.NONE)
{
//contradiction: correctionExists==true but location NONE
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}else// for mean
{
//two correction cells: count at offset 1, Kahan correction at offset 2
int corRow=row, corCol=column;
int countRow=row, countCol=column;
if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOROWS)
{
countRow++;
corRow+=2;
}
else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS)
{
countCol++;
corCol+=2;
}
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
buffer._sum=result.quickGetValue(row, column);
buffer._correction=result.quickGetValue(corRow, corCol);
double count=result.quickGetValue(countRow, countCol)+1.0;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newvalue, count);
result.quickSetValue(row, column, buffer._sum);
result.quickSetValue(corRow, corCol, buffer._correction);
result.quickSetValue(countRow, countCol, count);
}
}else
{
//no correction: plain incremental update via the operator's function
newvalue=aggOp.increOp.fn.execute(result.quickGetValue(row, column), newvalue);
result.quickSetValue(row, column, newvalue);
}
}
/**
 * Drops the trailing correction rows or columns from this block (in place)
 * after an aggregate with corrections has been computed, maintaining the
 * nonZeros count. For row corrections only the logical dimension shrinks;
 * for column corrections the remaining dense data is compacted.
 *
 * @param correctionLocation where the corrections are stored (LASTROW,
 *        LASTCOLUMN, LASTTWOROWS, LASTTWOCOLUMNS); NONE/INVALID are no-ops
 */
public void dropLastRowsOrColums(CorrectionLocationType correctionLocation)
{
//do nothing
if( correctionLocation==CorrectionLocationType.NONE
|| correctionLocation==CorrectionLocationType.INVALID )
{
return;
}
//determine number of rows/cols to be removed
int step = ( correctionLocation==CorrectionLocationType.LASTTWOROWS
|| correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS) ? 2 : 1;
//e.g., colSums, colMeans, colMaxs, colMeans
if( correctionLocation==CorrectionLocationType.LASTROW
|| correctionLocation==CorrectionLocationType.LASTTWOROWS )
{
//subtract the non-zeros of the dropped rows from the nnz count
if( sparse ) //SPARSE
{
if(sparseRows!=null)
for(int i=1; i<=step; i++)
if(sparseRows[rlen-i]!=null)
this.nonZeros-=sparseRows[rlen-i].size();
}
else //DENSE
{
if(denseBlock!=null)
for(int i=(rlen-step)*clen; i<rlen*clen; i++)
if(denseBlock[i]!=0)
this.nonZeros--;
}
//just need to shrink the dimension, the deleted rows won't be accessed
rlen -= step;
}
//e.g., rowSums, rowsMeans, rowsMaxs, rowsMeans
if( correctionLocation==CorrectionLocationType.LASTCOLUMN
|| correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS )
{
if(sparse) //SPARSE
{
if(sparseRows!=null)
{
//truncate each sparse row at the first index >= clen-step
for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
if(sparseRows[r]!=null)
{
int newSize=sparseRows[r].searchIndexesFirstGTE(clen-step);
if(newSize>=0)
{
this.nonZeros-=sparseRows[r].size()-newSize;
sparseRows[r].truncate(newSize);
}
}
}
}
else //DENSE
{
if(this.denseBlock!=null)
{
//compact rows in place: shift each row left to drop its tail,
//recomputing nonZeros while copying
//the first row doesn't need to be copied
int targetIndex=clen-step;
int sourceOffset=clen;
this.nonZeros=0;
for(int i=0; i<targetIndex; i++)
if(denseBlock[i]!=0)
this.nonZeros++;
//start from the 2nd row
for(int r=1; r<rlen; r++)
{
for(int c=0; c<clen-step; c++)
{
if((denseBlock[targetIndex]=denseBlock[sourceOffset+c])!=0)
this.nonZeros++;
targetIndex++;
}
sourceOffset+=clen;
}
}
}
clen -= step;
}
}
/**
 * Computes a central moment (e.g., mean, variance) over this column vector.
 *
 * @param op central moment operator
 * @return accumulated CM_COV_Object holding the moment statistics
 * @throws DMLRuntimeException if this block is not a single-column matrix
 */
public CM_COV_Object cmOperations(CMOperator op)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1) {
throw new DMLRuntimeException("Central Moment can not be computed on ["
+ this.getNumRows() + "," + this.getNumColumns() + "] matrix.");
}
CM_COV_Object cmobj = new CM_COV_Object();
int nzcount = 0;
if(sparse && sparseRows!=null) //SPARSE
{
//fold in non-zeros first, then account for all zeros at once below
for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
{
if(sparseRows[r]==null)
continue;
double[] values=sparseRows[r].getValueContainer();
for(int i=0; i<sparseRows[r].size(); i++) {
op.fn.execute(cmobj, values[i]);
nzcount++;
}
}
// account for zeros in the vector
op.fn.execute(cmobj, 0.0, this.getNumRows()-nzcount);
}
else if(denseBlock!=null) //DENSE
{
//always vector (see check above)
for(int i=0; i<rlen; i++)
op.fn.execute(cmobj, denseBlock[i]);
}
return cmobj;
}
/**
 * Computes a weighted central moment over this column vector, where each
 * value is weighted by the corresponding entry of the weights vector.
 *
 * @param op central moment operator
 * @param weights column vector of weights, same dimensions as this block
 * @return accumulated CM_COV_Object holding the moment statistics
 * @throws DMLRuntimeException if inputs are not matching 1-column matrices
 */
public CM_COV_Object cmOperations(CMOperator op, MatrixBlock weights)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1 || weights.getNumColumns() != 1) {
throw new DMLRuntimeException("Central Moment can be computed only on 1-dimensional column matrices.");
}
if ( this.getNumRows() != weights.getNumRows() || this.getNumColumns() != weights.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching dimensions between input and weight matrices - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ weights.getNumRows() + "," + weights.getNumColumns() +"]");
}
CM_COV_Object cmobj = new CM_COV_Object();
if (sparse && sparseRows!=null) //SPARSE
{
//weighted moments are not sparse-safe: visit every row incl. zeros
for(int i=0; i < rlen; i++)
op.fn.execute(cmobj, this.quickGetValue(i,0), weights.quickGetValue(i,0));
}
else if(denseBlock!=null) //DENSE
{
//always vectors (see check above)
if( !weights.sparse )
{
//both dense vectors (default case)
if(weights.denseBlock!=null)
for( int i=0; i<rlen; i++ )
op.fn.execute(cmobj, denseBlock[i], weights.denseBlock[i]);
}
else
{
//dense values, sparse weights: generic weight access
for(int i=0; i<rlen; i++)
op.fn.execute(cmobj, denseBlock[i], weights.quickGetValue(i,0) );
}
}
return cmobj;
}
/**
 * Computes the covariance between this column vector and another.
 *
 * @param op covariance operator
 * @param that second column vector, same dimensions as this block
 * @return accumulated CM_COV_Object holding the covariance statistics
 * @throws DMLRuntimeException if inputs are not matching 1-column matrices
 */
public CM_COV_Object covOperations(COVOperator op, MatrixBlock that)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1 || that.getNumColumns() != 1 ) {
throw new DMLRuntimeException("Covariance can be computed only on 1-dimensional column matrices.");
}
if ( this.getNumRows() != that.getNumRows() || this.getNumColumns() != that.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching input matrix dimensions - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ that.getNumRows() + "," + that.getNumColumns() +"]");
}
CM_COV_Object covobj = new CM_COV_Object();
if(sparse && sparseRows!=null) //SPARSE
{
//covariance is not sparse-safe: visit every row incl. zeros
for(int i=0; i < rlen; i++ )
op.fn.execute(covobj, this.quickGetValue(i,0), that.quickGetValue(i,0));
}
else if(denseBlock!=null) //DENSE
{
//always vectors (see check above)
if( !that.sparse )
{
//both dense vectors (default case)
if(that.denseBlock!=null)
for( int i=0; i<rlen; i++ )
op.fn.execute(covobj, denseBlock[i], that.denseBlock[i]);
}
else
{
//dense this, sparse that: generic value access
for(int i=0; i<rlen; i++)
op.fn.execute(covobj, denseBlock[i], that.quickGetValue(i,0));
}
}
return covobj;
}
/**
 * Computes the weighted covariance between this column vector and another,
 * with per-row weights.
 *
 * @param op covariance operator
 * @param that second column vector, same dimensions as this block
 * @param weights column vector of weights, same dimensions as this block
 * @return accumulated CM_COV_Object holding the covariance statistics
 * @throws DMLRuntimeException if inputs are not matching 1-column matrices
 */
public CM_COV_Object covOperations(COVOperator op, MatrixBlock that, MatrixBlock weights)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1 || that.getNumColumns() != 1 || weights.getNumColumns() != 1) {
throw new DMLRuntimeException("Covariance can be computed only on 1-dimensional column matrices.");
}
if ( this.getNumRows() != that.getNumRows() || this.getNumColumns() != that.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching input matrix dimensions - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ that.getNumRows() + "," + that.getNumColumns() +"]");
}
if ( this.getNumRows() != weights.getNumRows() || this.getNumColumns() != weights.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching dimensions between input and weight matrices - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ weights.getNumRows() + "," + weights.getNumColumns() +"]");
}
CM_COV_Object covobj = new CM_COV_Object();
if(sparse && sparseRows!=null) //SPARSE
{
//weighted covariance is not sparse-safe: visit every row incl. zeros
for(int i=0; i < rlen; i++ )
op.fn.execute(covobj, this.quickGetValue(i,0), that.quickGetValue(i,0), weights.quickGetValue(i,0));
}
else if(denseBlock!=null) //DENSE
{
//always vectors (see check above)
if( !that.sparse && !weights.sparse )
{
//all dense vectors (default case)
if(that.denseBlock!=null)
for( int i=0; i<rlen; i++ )
op.fn.execute(covobj, denseBlock[i], that.denseBlock[i], weights.denseBlock[i]);
}
else
{
//mixed representations: generic value access for that/weights
for(int i=0; i<rlen; i++)
op.fn.execute(covobj, denseBlock[i], that.quickGetValue(i,0), weights.quickGetValue(i,0));
}
}
return covobj;
}
/**
 * Sorts this column vector (optionally weighted) into a two-column
 * (value, weight) matrix in ascending value order. Zeros are compressed
 * into a single leading row (value 0, aggregated weight), which downstream
 * quantile/median code relies on.
 *
 * @param weights optional column vector of weights (null for unweighted)
 * @param result output to reuse, or null to allocate a new block
 * @return (nnz+1) x 2 matrix: column 0 sorted values, column 1 weights
 * @throws DMLRuntimeException if inputs are not single-column matrices
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue sortOperations(MatrixValue weights, MatrixValue result) throws DMLRuntimeException, DMLUnsupportedOperationException {
boolean wtflag = (weights!=null);
MatrixBlock wts= (weights == null ? null : checkType(weights));
checkType(result);
if ( getNumColumns() != 1 ) {
throw new DMLRuntimeException("Invalid input dimensions (" + getNumRows() + "x" + getNumColumns() + ") to sort operation.");
}
if ( wts != null && wts.getNumColumns() != 1 ) {
throw new DMLRuntimeException("Invalid weight dimensions (" + wts.getNumRows() + "x" + wts.getNumColumns() + ") to sort operation.");
}
// prepare result, currently always dense
// #rows in temp matrix = 1 + #nnz in the input ( 1 is for the "zero" value)
int dim1 = (int) (1+this.getNonZeros());
if(result==null)
result=new MatrixBlock(dim1, 2, false);
else
result.reset(dim1, 2, false);
// Copy the input elements into a temporary array for sorting
// First column is data and second column is weights
// (since the inputs are vectors, they are likely dense - hence quickget is sufficient)
MatrixBlock tdw = new MatrixBlock(dim1, 2, false);
double d, w, zero_wt=0;
int ind = 1;
if( wtflag ) // w/ weights
{
//copy non-zero (value, weight) pairs; accumulate weights of zeros
for ( int i=0; i<rlen; i++ ) {
d = quickGetValue(i,0);
w = wts.quickGetValue(i,0);
if ( d != 0 ) {
tdw.quickSetValue(ind, 0, d);
tdw.quickSetValue(ind, 1, w);
ind++;
}
else
zero_wt += w;
}
}
else //w/o weights
{
//implicit weight 1 per entry; zero weight = number of zero entries
zero_wt = getNumRows() - getNonZeros();
for( int i=0; i<rlen; i++ ) {
d = quickGetValue(i,0);
if( d != 0 ){
tdw.quickSetValue(ind, 0, d);
tdw.quickSetValue(ind, 1, 1);
ind++;
}
}
}
//row 0 represents all zeros of the input vector
tdw.quickSetValue(0, 0, 0.0);
tdw.quickSetValue(0, 1, zero_wt); //num zeros in input
// Sort td and tw based on values inside td (ascending sort), incl copy into result
SortIndex sfn = SortIndex.getSortIndexFnObject(1, false, false);
ReorgOperator rop = new ReorgOperator(sfn);
LibMatrixReorg.reorg(tdw, (MatrixBlock)result, rop);
return result;
}
/**
 * Computes the weighted inter-quartile mean over this (value, weight)
 * matrix, i.e., the weighted mean of values between the 25% and 75%
 * quantiles. Assumes this block is sorted by value with weights in
 * column 1 (as produced by sortOperations) -- TODO confirm this precondition
 * against callers.
 *
 * @return inter-quartile mean
 * @throws DMLRuntimeException if weights are not integers
 */
public double interQuartileMean() throws DMLRuntimeException {
double sum_wt = sumWeightForQuantile();
//quartile boundaries in (fractional) weight positions
double q25d = 0.25*sum_wt;
double q75d = 0.75*sum_wt;
int q25i = (int) Math.ceil(q25d);
int q75i = (int) Math.ceil(q75d);
// skip until (but excluding) q25
int t = 0, i=-1;
while(i<getNumRows() && t < q25i) {
i++;
//System.out.println(" " + i + ": " + quickGetValue(i,0) + "," + quickGetValue(i,1));
t += quickGetValue(i,1);
}
// compute the portion of q25
double runningSum = (t-q25d)*quickGetValue(i,0);
// add until (including) q75
while(i<getNumRows() && t < q75i) {
i++;
t += quickGetValue(i,1);
runningSum += quickGetValue(i,0)*quickGetValue(i,1);
}
// subtract additional portion of q75
runningSum -= (t-q75d)*quickGetValue(i,0);
//the interquartile range covers half of the total weight
return runningSum/(sum_wt*0.5);
}
/**
 * Computes the weighted interQuartileMean.
 * The matrix block ("this" pointer) has two columns, in which the first column
 * refers to the data and second column denotes corresponding weights.
 *
 * NOTE(review): the "OLD" suffix suggests this implementation has been
 * superseded by {@link #interQuartileMean()} -- confirm whether it still has
 * callers before removing.
 *
 * @return InterQuartileMean
 * @throws DMLRuntimeException
 */
public double interQuartileMeanOLD() throws DMLRuntimeException {
double sum_wt = sumWeightForQuantile();
//quartile boundaries as integer weight positions
int fromPos = (int) Math.ceil(0.25*sum_wt);
int toPos = (int) Math.ceil(0.75*sum_wt);
int selectRange = toPos-fromPos; // range: (fromPos,toPos]
if ( selectRange == 0 )
return 0.0;
int index, count=0;
// The first row (0^th row) has value 0.
// If it has a non-zero weight i.e., input data has zero values
// then "index" must start from 0, otherwise we skip the first row
// and start with the next value in the data, which is in the 1st row.
if ( quickGetValue(0,1) > 0 )
index = 0;
else
index = 1;
// keep scanning the weights, until we hit the required position <code>fromPos</code>
while ( count < fromPos ) {
count += quickGetValue(index,1);
++index;
}
double runningSum;
double val;
int wt, selectedCount;
//partial contribution of the entry straddling the 25% boundary
runningSum = (count-fromPos) * quickGetValue(index-1, 0);
selectedCount = (count-fromPos);
while(count <= toPos ) {
val = quickGetValue(index,0);
wt = (int) quickGetValue(index,1);
//clip the last entry's weight so exactly selectRange units are selected
runningSum += (val * Math.min(wt, selectRange-selectedCount));
selectedCount += Math.min(wt, selectRange-selectedCount);
count += wt;
++index;
}
//System.out.println(fromPos + ", " + toPos + ": " + count + ", "+ runningSum + ", " + selectedCount);
return runningSum/selectedCount;
}
/**
 * Picks the values at multiple quantiles from this sorted (value, weight)
 * matrix, one output row per requested quantile.
 *
 * @param quantiles column vector of quantile positions in (0,1]
 * @param ret output to reuse, or null to allocate a new block
 * @return column vector of picked values, same dimensions as quantiles
 * @throws DMLRuntimeException if quantiles is not a single-column matrix
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue pickValues(MatrixValue quantiles, MatrixValue ret)
throws DMLUnsupportedOperationException, DMLRuntimeException {
MatrixBlock qs=checkType(quantiles);
if ( qs.clen != 1 ) {
throw new DMLRuntimeException("Multiple quantiles can only be computed on a 1D matrix");
}
MatrixBlock output = checkType(ret);
if(output==null)
output=new MatrixBlock(qs.rlen, qs.clen, false); // resulting matrix is mostly likely be dense
else
output.reset(qs.rlen, qs.clen, false);
//pick each requested quantile independently
for ( int i=0; i < qs.rlen; i++ ) {
output.quickSetValue(i, 0, this.pickValue(qs.quickGetValue(i,0)) );
}
return output;
}
/**
 * Computes the weighted median of this sorted (value, weight) matrix.
 * The two middle values are averaged when the total weight is even.
 *
 * @return median value
 * @throws DMLRuntimeException if weights are not integers
 */
public double median() throws DMLRuntimeException {
final double totalWeight = sumWeightForQuantile();
final boolean averageMiddle = (totalWeight % 2 == 0);
return pickValue(0.5, averageMiddle);
}
/**
 * Picks the value at the given quantile without middle-value averaging.
 * Convenience overload of {@link #pickValue(double, boolean)}.
 *
 * @param quantile quantile position in (0,1]
 * @return value at the requested quantile
 * @throws DMLRuntimeException if weights are not integers
 */
public double pickValue(double quantile) throws DMLRuntimeException {
final boolean average = false;
return pickValue(quantile, average);
}
/**
 * Picks the value at the given quantile from this sorted (value, weight)
 * matrix. If requested (and the total weight is even), averages the two
 * middle values. Row 0 is the synthetic zero row produced by sortOperations
 * and may carry zero weight, which is handled explicitly below.
 *
 * @param quantile quantile position in (0,1]
 * @param average if true, average the two straddling values (only applied when total weight is even)
 * @return value at the requested quantile
 * @throws DMLRuntimeException if weights are not integers
 */
public double pickValue(double quantile, boolean average)
throws DMLRuntimeException
{
double sum_wt = sumWeightForQuantile();
// do averaging only if it is asked for; and sum_wt is even
average = average && (sum_wt%2 == 0);
//target position in cumulative weight
int pos = (int) Math.ceil(quantile*sum_wt);
//scan cumulative weights until the target position is reached
int t = 0, i=-1;
do {
i++;
t += quickGetValue(i,1);
} while(t<pos && i < getNumRows());
//System.out.println("values: " + quickGetValue(i,0) + "," + quickGetValue(i,1) + " -- " + quickGetValue(i+1,0) + "," + quickGetValue(i+1,1));
if ( quickGetValue(i,1) != 0 ) {
// i^th value is present in the data set, simply return it
if ( average ) {
//target falls strictly inside this entry's weight: no averaging needed
if(pos < t) {
return quickGetValue(i,0);
}
if(quickGetValue(i+1,1) != 0)
return (quickGetValue(i,0)+quickGetValue(i+1,0))/2;
else
// (i+1)^th value is 0. So, fetch (i+2)^th value
return (quickGetValue(i,0)+quickGetValue(i+2,0))/2;
}
else
return quickGetValue(i, 0);
}
else {
// i^th value is not present in the data set.
// It can only happen in the case where i^th value is 0.0; and 0.0 is not present in the data set (but introduced by sort).
if ( i+1 < getNumRows() )
// when 0.0 is not the last element in the sorted order
return quickGetValue(i+1,0);
else
// when 0.0 is the last element in the sorted order (input data is all negative)
return quickGetValue(i-1,0);
}
}
/**
 * Sums the weight column (second column) of this two-column
 * (value, weight) matrix and validates that the total is integral,
 * as required by the quantile computations.
 *
 * @return total weight
 * @throws DMLRuntimeException if the summed weights are not integers
 */
private double sumWeightForQuantile()
throws DMLRuntimeException
{
double totalWeight = 0;
final int numRows = getNumRows();
for( int i=0; i<numRows; i++ )
totalWeight += quickGetValue(i, 1);
//fractional total weight indicates non-integer weights
if( Math.floor(totalWeight) < totalWeight ) {
throw new DMLRuntimeException("Unexpected error while computing quantile -- weights must be integers.");
}
return totalWeight;
}
/**
 * Index-aware variant of the binary aggregate (matrix multiplication);
 * the block indexes are accepted for interface compatibility but unused
 * here, as block-local multiplication does not depend on global position.
 *
 * @param m1Index block indexes of the left operand (unused)
 * @param m1Value left operand
 * @param m2Index block indexes of the right operand (unused)
 * @param m2Value right operand
 * @param result output to reuse, or null to allocate a new block
 * @param op binary aggregate operator (only multiply/plus, i.e., matrix mult)
 * @return result of the binary aggregate
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
public MatrixValue aggregateBinaryOperations(MatrixIndexes m1Index, MatrixValue m1Value, MatrixIndexes m2Index, MatrixValue m2Value,
MatrixValue result, AggregateBinaryOperator op )
throws DMLUnsupportedOperationException, DMLRuntimeException
{
return aggregateBinaryOperations(m1Value, m2Value, result, op);
}
/**
 * Computes a binary aggregate over two matrices. The only supported
 * operation is matrix multiplication (binary multiply with plus
 * aggregation), delegated to LibMatrixMult (single- or multi-threaded).
 *
 * @param m1Value left operand (k x m becomes m rows here: m1.rlen x m1.clen)
 * @param m2Value right operand (m1.clen must equal m2.rlen)
 * @param result output to reuse, or null to allocate a new block
 * @param op binary aggregate operator; must be (multiply, plus)
 * @return result of the matrix multiplication
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException if dimensions mismatch or the operator is unsupported
 */
public MatrixValue aggregateBinaryOperations(MatrixValue m1Value, MatrixValue m2Value, MatrixValue result, AggregateBinaryOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//check input types, dimensions, configuration
MatrixBlock m1 = checkType(m1Value);
MatrixBlock m2 = checkType(m2Value);
MatrixBlock ret = checkType(result);
if( m1.clen != m2.rlen ) {
//throw the declared exception type, consistent with the other
//validation checks in this class (was: raw RuntimeException)
throw new DMLRuntimeException("Dimensions do not match for matrix multiplication ("+m1.clen+"!="+m2.rlen+").");
}
if( !(op.binaryFn instanceof Multiply && op.aggOp.increOp.fn instanceof Plus) ) {
throw new DMLRuntimeException("Unsupported binary aggregate operation: ("+op.binaryFn+", "+op.aggOp+").");
}
//setup meta data (dimensions, sparsity)
int rl = m1.rlen;
int cl = m2.clen;
SparsityEstimate sp = estimateSparsityOnAggBinary(m1, m2, op);
//create output matrix block
if( ret==null )
ret = new MatrixBlock(rl, cl, sp.sparse, sp.estimatedNonZeros);//m1.sparse&&m2.sparse);
else
ret.reset(rl, cl, sp.sparse, sp.estimatedNonZeros);//m1.sparse&&m2.sparse);
//compute matrix multiplication (only supported binary aggregate operation)
if( op.getNumThreads() > 1 )
LibMatrixMult.matrixMult(m1, m2, ret, op.getNumThreads());
else
LibMatrixMult.matrixMult(m1, m2, ret);
return ret;
}
/**
 * Computes a ternary aggregate sum(m1*m2*m3) over three column vectors of
 * equal length, using Kahan summation for numerical stability.
 *
 * @param m1 first column vector
 * @param m2 second column vector
 * @param m3 third column vector
 * @param op aggregate operator; must be (multiply, kahan+)
 * @return scalar result of the aggregation
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException if dimensions mismatch or the operator is unsupported
 */
public ScalarObject aggregateTernaryOperations(MatrixBlock m1, MatrixBlock m2, MatrixBlock m3, AggregateBinaryOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//check input dimensions and operators
//(error messages fixed from "tertiary" to "ternary" to match the operation)
if( m1.rlen!=m2.rlen || m2.rlen!=m3.rlen || m1.clen!=1 || m2.clen!=1 || m3.clen!=1 )
throw new DMLRuntimeException("Invalid dimensions for aggregate ternary ("+m1.rlen+"x"+m1.clen+", "+m2.rlen+"x"+m2.clen+", "+m3.rlen+"x"+m3.clen+").");
if( !( op.aggOp.increOp.fn instanceof KahanPlus && op.binaryFn instanceof Multiply) )
throw new DMLRuntimeException("Unsupported operator for aggregate ternary operations.");
//early abort if any block is empty (product is all zeros)
if( m1.isEmptyBlock(false) || m2.isEmptyBlock(false) || m3.isEmptyBlock(false) )
return new DoubleObject(0);
//setup meta data (dimensions, sparsity)
int rlen = m1.rlen;
//compute block operations
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
if( !m1.sparse && !m2.sparse && !m3.sparse ) //DENSE
{
//direct array access for the all-dense case
double[] a = m1.denseBlock;
double[] b = m2.denseBlock;
double[] c = m3.denseBlock;
for( int i=0; i<rlen; i++ ) {
double val = a[i] * b[i] * c[i];
kplus.execute2( kbuff, val );
}
}
else //GENERAL CASE
{
//generic value access covers any mix of sparse/dense inputs
for( int i=0; i<rlen; i++ ) {
double val1 = m1.quickGetValue(i, 0);
double val2 = m2.quickGetValue(i, 0);
double val3 = m3.quickGetValue(i, 0);
double val = val1 * val2 * val3;
kplus.execute2( kbuff, val );
}
}
//create output
DoubleObject ret = new DoubleObject(kbuff._sum);
return ret;
}
/**
 * Invocation from CP instructions. The aggregate is computed on the groups object
 * against target and weights.
 *
 * Notes:
 * * The computed number of groups is reused for multiple invocations with different target.
 * * This implementation supports that the target is passed as column or row vector,
 * in case of row vectors we also use sparse-safe implementations for sparse safe
 * aggregation operators.
 *
 * @param tgt target values to aggregate per group (column or row vector)
 * @param wghts optional per-value weights (column vector, or null)
 * @param ret output to reuse, or null to allocate a new block
 * @param ngroups number of groups, or <=0 to derive from the group ids in this block
 * @param op CMOperator (count/mean/variance) or AggregateOperator (sum)
 * @return numGroups x 1 result vector
 * @throws DMLRuntimeException on invalid dimensions, group values, or operator
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue groupedAggOperations(MatrixValue tgt, MatrixValue wghts, MatrixValue ret, int ngroups, Operator op)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
//setup input matrices
// this <- groups
MatrixBlock target = checkType(tgt);
MatrixBlock weights = checkType(wghts);
//check valid dimensions
if( this.getNumColumns() != 1 || (weights!=null && weights.getNumColumns()!=1) )
throw new DMLRuntimeException("groupedAggregate can only operate on 1-dimensional column matrices for groups and weights.");
if( target.getNumColumns() != 1 && op instanceof CMOperator )
throw new DMLRuntimeException("groupedAggregate can only operate on 1-dimensional column matrices for target (for this aggregation function).");
if( target.getNumColumns() != 1 && target.getNumRows()!=1 )
throw new DMLRuntimeException("groupedAggregate can only operate on 1-dimensional column or row matrix for target.");
if( this.getNumRows() != Math.max(target.getNumRows(),target.getNumColumns()) || (weights != null && this.getNumRows() != weights.getNumRows()) )
throw new DMLRuntimeException("groupedAggregate can only operate on matrices with equal dimensions.");
// obtain numGroups from instruction, if provided
if (ngroups > 0)
numGroups = ngroups;
// Determine the number of groups
if( numGroups <= 0 ) //reuse if available
{
//group ids must be in [1, max]; validated via min/max scan
double min = this.min();
double max = this.max();
if ( min <= 0 )
throw new DMLRuntimeException("Invalid value (" + min + ") encountered in 'groups' while computing groupedAggregate");
if ( max <= 0 )
throw new DMLRuntimeException("Invalid value (" + max + ") encountered in 'groups' while computing groupedAggregate.");
numGroups = (int) max;
}
// Allocate result matrix
MatrixBlock result = checkType(ret);
boolean result_sparsity = estimateSparsityOnGroupedAgg(rlen, numGroups);
if(result==null)
result=new MatrixBlock(numGroups, 1, result_sparsity);
else
result.reset(numGroups, 1, result_sparsity);
// Compute the result
double w = 1; // default weight
//CM operator for count, mean, variance
//note: current support only for column vectors
if(op instanceof CMOperator) {
// initialize required objects for storing the result of CM operations
CM cmFn = CM.getCMFnObject(((CMOperator) op).getAggOpType());
CM_COV_Object[] cmValues = new CM_COV_Object[numGroups];
for ( int i=0; i < numGroups; i++ )
cmValues[i] = new CM_COV_Object();
for ( int i=0; i < this.getNumRows(); i++ ) {
int g = (int) this.quickGetValue(i, 0);
//NOTE(review): only g > numGroups is skipped here; g <= 0 would index
//cmValues[g-1] out of bounds -- presumably callers guarantee positive
//group ids when ngroups is provided; confirm
if ( g > numGroups )
continue;
double d = target.quickGetValue(i,0);
if ( weights != null )
w = weights.quickGetValue(i,0);
// cmValues is 0-indexed, whereas range of values for g = [1,numGroups]
cmFn.execute(cmValues[g-1], d, w);
}
// extract the required value from each CM_COV_Object
for ( int i=0; i < numGroups; i++ )
// result is 0-indexed, so is cmValues
result.quickSetValue(i, 0, cmValues[i].getRequiredResult(op));
}
//Aggregate operator for sum (via kahan sum)
//note: support for row/column vectors and dense/sparse
else if( op instanceof AggregateOperator )
{
//the only aggregate operator that is supported here is sum,
//furthermore, we always use KahanPlus and hence aggop.correctionExists is true
AggregateOperator aggop = (AggregateOperator) op;
//default case for aggregate(sum)
groupedAggregateKahanPlus(target, weights, result, aggop);
}
else
throw new DMLRuntimeException("Invalid operator (" + op + ") encountered while processing groupedAggregate.");
return result;
}
/**
 * This is a specific implementation for aggregate(fn="sum"), where we use KahanPlus for numerical
 * stability. In contrast to other functions of aggregate, this implementation supports row and column
 * vectors for target and exploits sparse representations since KahanPlus is sparse-safe.
 *
 * @param target values to sum per group (row or column vector)
 * @param weights optional per-value weights (column vector, or null)
 * @param result numGroups x 1 output vector (written in place)
 * @param aggop aggregate operator providing the initial value and the Kahan increment fn
 * @throws DMLRuntimeException
 */
private void groupedAggregateKahanPlus( MatrixBlock target, MatrixBlock weights, MatrixBlock result, AggregateOperator aggop ) throws DMLRuntimeException
{
boolean rowVector = target.getNumColumns()>1;
double w = 1; //default weight
//skip empty blocks (sparse-safe operation)
if( target.isEmptyBlock(false) )
return;
//init group buffers (one Kahan accumulator per group)
KahanObject[] buffer = new KahanObject[numGroups];
for(int i=0; i < numGroups; i++ )
buffer[i] = new KahanObject(aggop.initialValue, 0);
if( rowVector ) //target is rowvector
{
if( target.sparse ) //SPARSE target
{
if( target.sparseRows[0]!=null )
{
//iterate non-zeros of the single sparse row (sparse-safe)
int len = target.sparseRows[0].size();
int[] aix = target.sparseRows[0].getIndexContainer();
double[] avals = target.sparseRows[0].getValueContainer();
for( int j=0; j<len; j++ ) //for each nnz
{
//group id looked up at the value's column position
int g = (int) this.quickGetValue(aix[j], 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(aix[j],0);
aggop.increOp.fn.execute(buffer[g-1], avals[j]*w);
}
}
}
else //DENSE target
{
for ( int i=0; i < target.getNumColumns(); i++ ) {
double d = target.denseBlock[ i ];
if( d != 0 ) //sparse-safe
{
int g = (int) this.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
aggop.increOp.fn.execute(buffer[g-1], d*w);
}
}
}
}
else //column vector (always dense, but works for sparse as well)
{
for ( int i=0; i < this.getNumRows(); i++ )
{
double d = target.quickGetValue(i,0);
if( d != 0 ) //sparse-safe
{
int g = (int) this.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
aggop.increOp.fn.execute(buffer[g-1], d*w);
}
}
}
// extract the results from group buffers
for ( int i=0; i < numGroups; i++ )
result.quickSetValue(i, 0, buffer[i]._sum);
}
/**
 * Removes all-zero rows or columns from this block.
 *
 * @param ret output matrix value (reset and filled by this call)
 * @param rows true to remove empty rows, false to remove empty columns
 * @return the output with empty rows/columns removed
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue removeEmptyOperations( MatrixValue ret, boolean rows )
throws DMLRuntimeException, DMLUnsupportedOperationException
{
//empty input: the semantics of removeEmpty dictate a 1-by-n (rows) or
//m-by-1 (cols) all-zero output, because 0-dim matrices are not allowed
if( nonZeros==0 ) {
int outRows = rows ? 1 : rlen;
int outCols = rows ? clen : 1;
ret.reset(outRows, outCols, false);
return ret;
}
//dispatch to the row- or column-removal implementation
MatrixBlock result = checkType(ret);
return rows ? removeEmptyRows(result) : removeEmptyColumns(result);
}
/**
 * Removes all-zero rows from this block: scans for non-empty rows, then
 * copies them contiguously into the output (dense stays dense; sparse
 * output format is re-evaluated).
 *
 * @param ret output block (reset and filled by this call)
 * @return ret with empty rows removed (at least one row is kept)
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
private MatrixBlock removeEmptyRows(MatrixBlock ret)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
final int m = rlen;
final int n = clen;
//Step 1: scan block and determine non-empty rows
boolean[] flags = new boolean[ m ]; //false
int rlen2 = 0;
if( sparse ) //SPARSE
{
SparseRow[] a = sparseRows;
for ( int i=0; i < m; i++ )
if ( a[i] != null && !a[i].isEmpty() ) {
flags[i] = true;
rlen2++;
}
}
else //DENSE
{
double[] a = denseBlock;
for(int i=0, aix=0; i<m; i++, aix+=n) {
for(int j=0; j<n; j++)
if( a[aix+j] != 0 )
{
flags[i] = true;
rlen2++;
//early abort for current row
break;
}
}
}
//Step 2: reset result and copy rows
//dense stays dense, sparse might be dense/sparse
rlen2 = Math.max(rlen2, 1); //ensure valid output
boolean sp = evalSparseFormatInMemory(rlen2, n, nonZeros);
ret.reset(rlen2, n, sp);
if( sparse ) //SPARSE
{
//note: output dense or sparse
for( int i=0, cix=0; i<m; i++ )
if( flags[i] )
ret.appendRow(cix++, sparseRows[i]);
}
else //DENSE
{
ret.allocateDenseBlock();
double[] a = denseBlock;
double[] c = ret.denseBlock;
//copy each flagged row as one contiguous chunk
for( int i=0, aix=0, cix=0; i<m; i++, aix+=n )
if( flags[i] )
{
System.arraycopy(a, aix, c, cix, n);
cix += n; //target index
}
}
//check sparsity
ret.nonZeros = this.nonZeros;
ret.examSparsity();
return ret;
}
/**
 * Removes all-zero columns from this block: flags non-empty columns, builds
 * an old-to-new column index mapping, then copies the surviving columns
 * into the output (dense stays dense; sparse output format is re-evaluated).
 *
 * @param ret output block (reset and filled by this call)
 * @return ret with empty columns removed (at least one column is kept)
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
private MatrixBlock removeEmptyColumns(MatrixBlock ret)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
final int m = rlen;
final int n = clen;
//Step 1: scan block and determine non-empty columns
//(we optimized for cache-friendly behavior and hence don't do early abort)
boolean[] flags = new boolean[ n ]; //false
if( sparse ) //SPARSE
{
SparseRow[] a = sparseRows;
for( int i=0; i<m; i++ )
if ( a[i] != null && !a[i].isEmpty() ) {
int alen = a[i].size();
int[] aix = a[i].getIndexContainer();
for( int j=0; j<alen; j++ )
flags[ aix[j] ] = true;
}
}
else //DENSE
{
double[] a = denseBlock;
for(int i=0, aix=0; i<m; i++)
for(int j=0; j<n; j++, aix++)
if( a[aix] != 0 )
flags[j] = true;
}
//Step 2: determine number of columns
int clen2 = 0;
for( int j=0; j<n; j++ )
clen2 += flags[j] ? 1 : 0;
//Step 3: create mapping of flags to target indexes
int[] cix = new int[n];
for( int j=0, pos=0; j<n; j++ )
if( flags[j] )
cix[j] = pos++;
//Step 3: reset result and copy cols
//dense stays dense, sparse might be dense/sparse
clen2 = Math.max(clen2, 1); //ensure valid output
boolean sp = evalSparseFormatInMemory(m, clen2, nonZeros);
ret.reset(m, clen2, sp);
if( sparse ) //SPARSE
{
//note: output dense or sparse
SparseRow[] a = sparseRows;
for( int i=0; i<m; i++ )
if ( a[i] != null && !a[i].isEmpty() ) {
int alen = a[i].size();
int[] aix = a[i].getIndexContainer();
double[] avals = a[i].getValueContainer();
//remap each non-zero to its new (compacted) column index
for( int j=0; j<alen; j++ )
ret.appendValue(i, cix[aix[j]], avals[j]);
}
}
else //DENSE
{
ret.allocateDenseBlock();
double[] a = denseBlock;
double[] c = ret.denseBlock;
for(int i=0, aix=0, lcix=0; i<m; i++, lcix+=clen2)
for(int j=0; j<n; j++, aix++)
if( a[aix] != 0 )
c[ lcix+cix[j] ] = a[aix];
}
//check sparsity
ret.nonZeros = this.nonZeros;
ret.examSparsity();
return ret;
}
/**
 * Replaces every occurrence of a pattern value (incl. NaN patterns, which
 * need special comparison) with a replacement value, producing a new block.
 * Replacing a non-zero pattern is sparse-safe; replacing 0 in a sparse
 * input forces a dense output pre-filled with the replacement.
 *
 * @param result output to reuse (reset by this call)
 * @param pattern value to search for (may be NaN)
 * @param replacement value to substitute
 * @return block with all pattern occurrences replaced
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
@Override
public MatrixValue replaceOperations(MatrixValue result, double pattern, double replacement)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
MatrixBlock ret = checkType(result);
examSparsity(); //ensure its in the right format
ret.reset(rlen, clen, sparse);
//nothing to replace: pattern cannot occur in an all-zero block
if( nonZeros == 0 && pattern != 0 )
return ret; //early abort
//NaN != NaN, so NaN patterns need Double.isNaN comparison
boolean NaNpattern = Double.isNaN(pattern);
if( sparse ) //SPARSE
{
if( pattern != 0d ) //SPARSE <- SPARSE (sparse-safe)
{
ret.allocateSparseRowsBlock();
SparseRow[] a = sparseRows;
SparseRow[] c = ret.sparseRows;
for( int i=0; i<rlen; i++ )
{
SparseRow arow = a[ i ];
if( arow!=null && !arow.isEmpty() )
{
SparseRow crow = new SparseRow(arow.size());
int alen = arow.size();
int[] aix = arow.getIndexContainer();
double[] avals = arow.getValueContainer();
for( int j=0; j<alen; j++ )
{
double val = avals[j];
if( val== pattern || (NaNpattern && Double.isNaN(val)) )
crow.append(aix[j], replacement);
else
crow.append(aix[j], val);
}
c[ i ] = crow;
}
}
}
else //DENSE <- SPARSE
{
//replacing 0 turns all implicit zeros into the replacement value,
//so the output must be dense
ret.sparse = false;
ret.allocateDenseBlock();
SparseRow[] a = sparseRows;
double[] c = ret.denseBlock;
//initialize with replacement (since all 0 values, see SPARSITY_TURN_POINT)
Arrays.fill(c, replacement);
//overwrite with existing values (via scatter)
if( a != null ) //check for empty matrix
for( int i=0, cix=0; i<rlen; i++, cix+=clen )
{
SparseRow arow = a[ i ];
if( arow!=null && !arow.isEmpty() )
{
int alen = arow.size();
int[] aix = arow.getIndexContainer();
double[] avals = arow.getValueContainer();
for( int j=0; j<alen; j++ )
if( avals[ j ] != 0 )
c[ cix+aix[j] ] = avals[ j ];
}
}
}
}
else //DENSE <- DENSE
{
//NOTE(review): denseBlock is read without a null check here --
//presumably examSparsity() guarantees an allocated dense block when
//!sparse and the early abort covers the empty case; confirm
int mn = ret.rlen * ret.clen;
ret.allocateDenseBlock();
double[] a = denseBlock;
double[] c = ret.denseBlock;
for( int i=0; i<mn; i++ )
{
double val = a[i];
if( val== pattern || (NaNpattern && Double.isNaN(val)) )
c[i] = replacement;
else
c[i] = a[i];
}
}
ret.recomputeNonZeros();
ret.examSparsity();
return ret;
}
/**
 * D = ctable(A,v2,W):
 * this <- A; scalarThat <- v2; that2Val <- W; result <- D.
 *
 * Cell-wise, (i1,j1,v1) is taken from this block, v2 from the scalar input,
 * and the weight w from the third input. Counts are accumulated into
 * resultMap when resultBlock is null, otherwise directly into resultBlock.
 *
 * Sparse-unsafe: input values of 0 are invalid and have to result in
 * errors, hence all cells (including zeros) are enumerated.
 */
@Override
public void ternaryOperations(Operator op, double scalarThat,
		MatrixValue that2Val, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	MatrixBlock that2 = checkType(that2Val);
	CTable fn = CTable.getCTableFnObject();
	double v2 = scalarThat;
	boolean toMap = (resultBlock == null);

	for( int i=0; i<rlen; i++ ) {
		for( int j=0; j<clen; j++ ) {
			double v1 = quickGetValue(i, j);
			double w = that2.quickGetValue(i, j);
			if( toMap )
				fn.execute(v1, v2, w, false, resultMap);
			else
				fn.execute(v1, v2, w, false, resultBlock);
		}
	}

	if( !toMap )
		resultBlock.recomputeNonZeros();
}
/**
 * D = ctable(A,v2,w):
 * this <- A; scalarThat <- v2; scalarThat2 <- w; result <- D.
 *
 * Cell-wise, (i1,j1,v1) is taken from this block; both v2 and the weight w
 * are scalars. Counts are accumulated into resultMap when resultBlock is
 * null, otherwise directly into resultBlock.
 *
 * Sparse-unsafe: input values of 0 are invalid and have to result in
 * errors, hence all cells (including zeros) are enumerated.
 */
@Override
public void ternaryOperations(Operator op, double scalarThat,
		double scalarThat2, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	CTable fn = CTable.getCTableFnObject();
	double v2 = scalarThat;
	double w = scalarThat2;
	boolean toMap = (resultBlock == null);

	for( int i=0; i<rlen; i++ ) {
		for( int j=0; j<clen; j++ ) {
			double v1 = quickGetValue(i, j);
			if( toMap )
				fn.execute(v1, v2, w, false, resultMap);
			else
				fn.execute(v1, v2, w, false, resultBlock);
		}
	}

	if( !toMap )
		resultBlock.recomputeNonZeros();
}
/**
 * Specific ctable case of ctable(seq(...),X), where X is the only matrix
 * input. The 'left' flag specifies whether the seq appeared on the left;
 * otherwise it appeared on the right. The block row index ix1 and block
 * size brlen are used to reconstruct the global 1-based seq positions.
 *
 * Sparse-unsafe: input values of 0 are invalid and have to result in
 * errors, hence all cells (including zeros) are enumerated.
 */
@Override
public void ternaryOperations(Operator op, MatrixIndexes ix1, double scalarThat,
		boolean left, int brlen, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	CTable fn = CTable.getCTableFnObject();
	double w = scalarThat;
	int offset = (int) ((ix1.getRowIndex()-1)*brlen);
	boolean toMap = (resultBlock == null);

	for( int i=0; i<rlen; i++ ) {
		double seqVal = offset + i + 1; //global 1-based seq position of row i
		for( int j=0; j<clen; j++ ) {
			double v1 = quickGetValue(i, j);
			if( toMap ) {
				if( left )
					fn.execute(seqVal, v1, w, false, resultMap);
				else
					fn.execute(v1, seqVal, w, false, resultMap);
			}
			else {
				if( left )
					fn.execute(seqVal, v1, w, false, resultBlock);
				else
					fn.execute(v1, seqVal, w, false, resultBlock);
			}
		}
	}

	if( !toMap )
		resultBlock.recomputeNonZeros();
}
/**
 * D = ctable(A,B,w)
 * this <- A; that <- B; scalar_that2 <- w; result <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (i1,j1,v2) from input2 (that)
 * (w) from scalar_input3 (scalarThat2)
 *
 * NOTE: This method supports both vectors and matrices. In case of matrices and ignoreZeros=true
 * we can also use a sparse-safe implementation
 */
@Override
public void ternaryOperations(Operator op, MatrixValue thatVal, double scalarThat2, boolean ignoreZeros,
		HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	//setup ctable computation
	MatrixBlock that = checkType(thatVal);
	CTable ctable = CTable.getCTableFnObject();
	double w = scalarThat2;

	if( ignoreZeros //SPARSE-SAFE & SPARSE INPUTS
		&& this.sparse && that.sparse )
	{
		//note: only used if both inputs have aligned zeros, which
		//allows us to infer that the nnz both inputs are equivalent

		//early abort on empty blocks possible
		if( this.isEmptyBlock(false) && that.isEmptyBlock(false) )
			return;

		SparseRow[] a = this.sparseRows;
		SparseRow[] b = that.sparseRows;
		for( int i=0; i<rlen; i++ )
		{
			SparseRow arow = a[i];
			SparseRow brow = b[i];
			if( arow != null && !arow.isEmpty() )
			{
				int alen = arow.size();
				double[] avals = arow.getValueContainer();
				//NOTE(review): brow is dereferenced without a null/empty check;
				//this relies on the aligned-zeros precondition above (a non-empty
				//arow implies a matching non-empty brow) — confirm callers guarantee it
				double[] bvals = brow.getValueContainer();

				if( resultBlock == null ) {
					for( int j=0; j<alen; j++ )
						ctable.execute(avals[j], bvals[j], w, ignoreZeros, resultMap);
				}
				else {
					for( int j=0; j<alen; j++ )
						ctable.execute(avals[j], bvals[j], w, ignoreZeros, resultBlock);
				}
			}
		}
	}
	else //SPARSE-UNSAFE | GENERIC INPUTS
	{
		//sparse-unsafe ctable execution
		//(because input values of 0 are invalid and have to result in errors)
		for( int i=0; i<rlen; i++ )
			for( int j=0; j<clen; j++ )
			{
				double v1 = this.quickGetValue(i, j);
				double v2 = that.quickGetValue(i, j);
				if( resultBlock == null )
					ctable.execute(v1, v2, w, ignoreZeros, resultMap);
				else
					ctable.execute(v1, v2, w, ignoreZeros, resultBlock);
			}
	}

	//maintain nnz (if necessary)
	if( resultBlock!=null )
		resultBlock.recomputeNonZeros();
}
/**
 * D = ctable(seq,A,w):
 * this <- seq; thatMatrix <- A; thatScalar <- w; resultBlock <- D.
 *
 * Expand variant: every row i contributes exactly one output cell, placed
 * at row i+1 and the column given by the value in column 0 of the second
 * input. The result block is guaranteed to be pre-allocated by the caller.
 *
 * Sparse-unsafe: input values of 0 are invalid and have to result in errors.
 */
public void ternaryOperations(Operator op, MatrixValue thatMatrix, double thatScalar, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	MatrixBlock that = checkType(thatMatrix);
	CTable fn = CTable.getCTableFnObject();
	double w = thatScalar;

	int maxCol = 0;
	for( int i=0; i<rlen; i++ ) {
		double colVal = that.quickGetValue(i, 0);
		maxCol = fn.execute(i+1, colVal, w, maxCol, resultBlock);
	}

	//update meta data (initially unknown number of columns);
	//note: nnz is maintained inside ctable (via quickset)
	resultBlock.clen = maxCol;
}
/**
 * D = ctable(A,B,W)
 * this <- A; thatVal <- B; that2Val <- W; ctableResult <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (i1,j1,v2) from input2 (that)
 * (i1,j1,w) from input3 (that2)
 *
 * Convenience overload that accumulates into the result map only
 * (delegates to the map/block variant with a null result block).
 */
public void ternaryOperations(Operator op, MatrixValue thatVal, MatrixValue that2Val, HashMap<MatrixIndexes, Double> ctableResult)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	ternaryOperations(op, thatVal, that2Val, ctableResult, null);
}
/**
 * D = ctable(A,B,W):
 * this <- A; thatVal <- B; that2Val <- W; result <- D.
 *
 * Cell-wise, (i1,j1,v1), (i1,j1,v2) and the weight (i1,j1,w) are read from
 * the three aligned inputs. Counts are accumulated into resultMap when
 * resultBlock is null, otherwise directly into resultBlock.
 *
 * Sparse-unsafe: input values of 0 are invalid and have to result in
 * errors, hence all cells (including zeros) are enumerated.
 */
@Override
public void ternaryOperations(Operator op, MatrixValue thatVal, MatrixValue that2Val, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	MatrixBlock that = checkType(thatVal);
	MatrixBlock that2 = checkType(that2Val);
	CTable fn = CTable.getCTableFnObject();
	boolean toMap = (resultBlock == null);

	for( int i=0; i<rlen; i++ ) {
		for( int j=0; j<clen; j++ ) {
			double v1 = quickGetValue(i, j);
			double v2 = that.quickGetValue(i, j);
			double w = that2.quickGetValue(i, j);
			if( toMap )
				fn.execute(v1, v2, w, false, resultMap);
			else
				fn.execute(v1, v2, w, false, resultBlock);
		}
	}

	if( !toMap )
		resultBlock.recomputeNonZeros();
}
/**
 * Quaternary operation wsloss: computes the weighted squared loss over
 * X (this), factor matrices U (um) and V (vm), and optional weights W (wm),
 * returning the scalar result in a 1x1 block.
 *
 * Fix: the column-mismatch error message previously printed getNumRows()
 * instead of getNumColumns(), producing a misleading diagnostic.
 */
@Override
public MatrixValue quaternaryOperations(Operator op, MatrixValue um, MatrixValue vm, MatrixValue wm, MatrixValue out, WeightsType wt)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	//check input dimensions
	if( getNumRows() != um.getNumRows() )
		throw new DMLRuntimeException("Dimension mismatch rows on wsloss: "+getNumRows()+"!="+um.getNumRows());
	if( getNumColumns() != vm.getNumRows() )
		throw new DMLRuntimeException("Dimension mismatch columns on wsloss: "+getNumColumns()+"!="+vm.getNumRows());

	//check input data types
	MatrixBlock X = this;
	MatrixBlock W = (wt!=WeightsType.NONE)?checkType(wm):null; //weights only if required
	MatrixBlock U = checkType(um);
	MatrixBlock V = checkType(vm);
	MatrixBlock R = checkType(out);

	//prepare intermediates and output (scalar result)
	R.reset(1, 1, false);

	//core block computation
	LibMatrixMult.matrixMultWSLoss(X, U, V, W, R, wt);

	return R;
}
////////
// Data Generation Methods
// (rand, sequence)
/**
 * Function to generate the random matrix with specified dimensions
 * (block sizes are not specified); delegates to the block-level variant
 * using the configured default block size.
 *
 * Fix: reuse the already-fetched config object instead of invoking
 * ConfigurationManager.getConfig() a second time — the null-checked
 * reference and the dereferenced one were previously obtained by two
 * separate calls.
 *
 * @param rows number of rows
 * @param cols number of columns
 * @param sparsity target sparsity
 * @param min minimum cell value
 * @param max maximum cell value
 * @param pdf probability density function name
 * @param seed random generator seed
 * @return generated matrix block
 * @throws DMLRuntimeException
 */
public static MatrixBlock randOperations(int rows, int cols, double sparsity, double min, double max, String pdf, long seed)
	throws DMLRuntimeException
{
	DMLConfig conf = ConfigurationManager.getConfig();
	int blocksize = (conf != null) ? conf.getIntValue(DMLConfig.DEFAULT_BLOCK_SIZE)
			                       : DMLTranslator.DMLBlockSize;
	return randOperations(
			rows, cols, blocksize, blocksize,
			sparsity, min, max, pdf, seed);
}
/**
 * Function to generate the random matrix with specified dimensions and
 * block dimensions. For the normal distribution, min and max are not
 * meaningful and are passed as NaN.
 *
 * @param rows number of rows
 * @param cols number of columns
 * @param rowsInBlock rows per block
 * @param colsInBlock columns per block
 * @param sparsity target sparsity
 * @param min minimum cell value (uniform pdf)
 * @param max maximum cell value (uniform pdf)
 * @param pdf probability density function name
 * @param seed random generator seed
 * @return generated matrix block
 * @throws DMLRuntimeException
 */
public static MatrixBlock randOperations(int rows, int cols, int rowsInBlock, int colsInBlock, double sparsity, double min, double max, String pdf, long seed)
	throws DMLRuntimeException
{
	MatrixBlock out = new MatrixBlock();
	Well1024a bigrand = null;
	long[] nnzInBlock = null;

	//setup seeds and nnz per block (not required for shortcut operations)
	if( !LibMatrixDatagen.isShortcutRandOperation(min, max, sparsity, pdf) ) {
		bigrand = LibMatrixDatagen.setupSeedsForRand(seed);
		nnzInBlock = LibMatrixDatagen.computeNNZperBlock(rows, cols, rowsInBlock, colsInBlock, sparsity);
	}

	//generate rand data; for normally distributed values, min and max
	//are specified as an invalid value NaN
	boolean normal = pdf.equalsIgnoreCase(LibMatrixDatagen.RAND_PDF_NORMAL);
	double lo = normal ? Double.NaN : min;
	double hi = normal ? Double.NaN : max;
	out.randOperationsInPlace(pdf, rows, cols, rowsInBlock, colsInBlock, nnzInBlock, sparsity, lo, hi, bigrand, -1);

	return out;
}
/**
 * Function to generate a matrix of random numbers. This is invoked both
 * from CP as well as from MR. In case of CP, it generates an entire matrix
 * block-by-block. A <code>bigrand</code> is passed so that block-level
 * seeds are generated internally. In case of MR, it generates a single
 * block for given block-level seed <code>bSeed</code>.
 *
 * When pdf="uniform", cell values are drawn from uniform distribution in
 * range <code>[min,max]</code>.
 *
 * When pdf="normal", cell values are drawn from standard normal
 * distribution N(0,1). The range of generated values will always be
 * (-Inf,+Inf).
 *
 * The actual generation is delegated to LibMatrixDatagen; this block is
 * filled in place and returned for convenient chaining.
 *
 * @param pdf probability density function name
 * @param rows number of rows
 * @param cols number of columns
 * @param rowsInBlock rows per block
 * @param colsInBlock columns per block
 * @param nnzInBlock pre-computed nnz per block (may be null for shortcuts)
 * @param sparsity target sparsity
 * @param min minimum cell value (NaN for normal pdf)
 * @param max maximum cell value (NaN for normal pdf)
 * @param bigrand seed generator for CP (null in MR)
 * @param bSeed block-level seed for MR
 * @return this block, filled with generated data
 * @throws DMLRuntimeException
 */
public MatrixBlock randOperationsInPlace(String pdf, int rows, int cols, int rowsInBlock, int colsInBlock, long[] nnzInBlock, double sparsity, double min, double max, Well1024a bigrand, long bSeed)
	throws DMLRuntimeException
{
	LibMatrixDatagen.generateRandomMatrix( this, pdf, rows, cols, rowsInBlock, colsInBlock,
			nnzInBlock, sparsity, min, max, bigrand, bSeed );
	return this;
}
/**
 * Method to generate a sequence according to the given parameters. The
 * generated sequence is always in dense format.
 *
 * Both end points specified <code>from</code> and <code>to</code> must be
 * included in the generated sequence i.e., [from,to] both inclusive. Note
 * that, <code>to</code> is included only if (to-from) is perfectly
 * divisible by <code>incr</code>.
 *
 * For example, seq(0,1,0.5) generates (0.0 0.5 1.0)
 * whereas seq(0,1,0.6) generates (0.0 0.6) but not (0.0 0.6 1.0)
 *
 * @param from first value of the sequence (inclusive)
 * @param to upper bound of the sequence
 * @param incr increment between consecutive values
 * @return new dense matrix block holding the sequence
 * @throws DMLRuntimeException
 */
public static MatrixBlock seqOperations(double from, double to, double incr)
	throws DMLRuntimeException
{
	MatrixBlock out = new MatrixBlock();
	LibMatrixDatagen.generateSequence( out, from, to, incr );

	return out;
}

/**
 * In-place variant of seqOperations: fills this block with the sequence
 * (see seqOperations for the inclusion semantics of the end point).
 *
 * @param from first value of the sequence (inclusive)
 * @param to upper bound of the sequence
 * @param incr increment between consecutive values
 * @return this block, filled with the sequence
 * @throws DMLRuntimeException
 */
public MatrixBlock seqOperationsInPlace(double from, double to, double incr)
	throws DMLRuntimeException
{
	LibMatrixDatagen.generateSequence( this, from, to, incr );

	return this;
}
////////
// Misc methods
private static MatrixBlock checkType(MatrixValue block) throws DMLUnsupportedOperationException
{
if( block!=null && !(block instanceof MatrixBlock))
throw new DMLUnsupportedOperationException("the Matrix Value is not MatrixBlockDSM!");
return (MatrixBlock) block;
}
/**
 * Debug utility: prints the representation flag, the nnz count (dense
 * only), and all cell values tab-separated, one line per row, to stdout.
 */
public void print()
{
	System.out.println("sparse = "+sparse);
	if(!sparse)
		System.out.println("nonzeros = "+nonZeros);
	for( int r=0; r<rlen; r++ ) {
		StringBuilder row = new StringBuilder();
		for( int c=0; c<clen; c++ ) {
			row.append(quickGetValue(r, c));
			row.append('\t');
		}
		System.out.println(row);
	}
}
//the conventional Object comparison operations are deliberately unsupported
//for matrix blocks; NOTE(review): throwing from equals/hashCode violates the
//Object contract, so matrix blocks must never be used as keys in hash-based
//collections or elements of sorted collections.
@Override
public int compareTo(Object arg0) {
	throw new RuntimeException("CompareTo should never be called for matrix blocks.");
}

@Override
public boolean equals(Object arg0) {
	throw new RuntimeException("Equals should never be called for matrix blocks.");
}

@Override
public int hashCode() {
	throw new RuntimeException("HashCode should never be called for matrix blocks.");
}
/**
 * Returns a human-readable dump of the block: representation flag, nnz,
 * dimensions, and the per-row contents (sparse rows print their SparseRow
 * representation, rows beyond the allocated array print as null; dense
 * blocks print all cell values tab-separated). Intended for debugging.
 */
@Override
public String toString()
{
	StringBuilder sb = new StringBuilder();

	sb.append("sparse? = ");
	sb.append(sparse);
	sb.append("\n");

	sb.append("nonzeros = ");
	sb.append(nonZeros);
	sb.append("\n");

	sb.append("size: ");
	sb.append(rlen);
	sb.append(" X ");
	sb.append(clen);
	sb.append("\n");

	if(sparse)
	{
		int len=0;
		if(sparseRows!=null)
			len = Math.min(rlen, sparseRows.length);
		int i=0;
		for(; i<len; i++)
		{
			sb.append("row +");
			sb.append(i);
			sb.append(": ");
			sb.append(sparseRows[i]);
			sb.append("\n");
		}
		//remaining rows have no allocated sparse row
		for(; i<rlen; i++)
		{
			sb.append("row +");
			sb.append(i);
			sb.append(": null\n");
		}
	}
	else
	{
		if(denseBlock!=null)
		{
			for(int i=0, ix=0; i<rlen; i++, ix+=clen) {
				for(int j=0; j<clen; j++) {
					sb.append(this.denseBlock[ix+j]);
					sb.append("\t");
				}
				sb.append("\n");
			}
		}
	}

	return sb.toString();
}
///////////////////////////
// Helper classes
/**
 * Simple value holder for a sparsity estimation result: the estimated
 * number of non-zeros and the derived sparse/dense format decision.
 */
public static class SparsityEstimate
{
	public long estimatedNonZeros = 0;
	public boolean sparse = false;

	public SparsityEstimate() {}

	public SparsityEstimate(boolean sps, long nnzs)
	{
		this.sparse = sps;
		this.estimatedNonZeros = nnzs;
	}
}
}
/**
* IBM Confidential
* OCO Source Materials
* (C) Copyright IBM Corp. 2010, 2015
* The source code for this program is not published or otherwise divested of its trade secrets, irrespective of what has been deposited with the U.S. Copyright Office.
*/
package com.ibm.bi.dml.runtime.matrix.data;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.math3.random.Well1024a;
import org.apache.hadoop.io.DataInputBuffer;
import com.ibm.bi.dml.conf.ConfigurationManager;
import com.ibm.bi.dml.conf.DMLConfig;
import com.ibm.bi.dml.lops.MMTSJ.MMTSJType;
import com.ibm.bi.dml.lops.MapMultChain.ChainType;
import com.ibm.bi.dml.lops.PartialAggregate.CorrectionLocationType;
import com.ibm.bi.dml.lops.WeightedSquaredLoss.WeightsType;
import com.ibm.bi.dml.parser.DMLTranslator;
import com.ibm.bi.dml.runtime.DMLRuntimeException;
import com.ibm.bi.dml.runtime.DMLUnsupportedOperationException;
import com.ibm.bi.dml.runtime.functionobjects.And;
import com.ibm.bi.dml.runtime.functionobjects.Builtin;
import com.ibm.bi.dml.runtime.functionobjects.CM;
import com.ibm.bi.dml.runtime.functionobjects.CTable;
import com.ibm.bi.dml.runtime.functionobjects.DiagIndex;
import com.ibm.bi.dml.runtime.functionobjects.KahanPlus;
import com.ibm.bi.dml.runtime.functionobjects.Multiply;
import com.ibm.bi.dml.runtime.functionobjects.Plus;
import com.ibm.bi.dml.runtime.functionobjects.ReduceAll;
import com.ibm.bi.dml.runtime.functionobjects.SortIndex;
import com.ibm.bi.dml.runtime.functionobjects.SwapIndex;
import com.ibm.bi.dml.runtime.instructions.cp.CM_COV_Object;
import com.ibm.bi.dml.runtime.instructions.cp.DoubleObject;
import com.ibm.bi.dml.runtime.instructions.cp.KahanObject;
import com.ibm.bi.dml.runtime.instructions.cp.ScalarObject;
import com.ibm.bi.dml.runtime.instructions.mr.RangeBasedReIndexInstruction.IndexRange;
import com.ibm.bi.dml.runtime.matrix.data.LibMatrixBincell.BinaryAccessType;
import com.ibm.bi.dml.runtime.matrix.mapred.IndexedMatrixValue;
import com.ibm.bi.dml.runtime.matrix.mapred.MRJobConfiguration;
import com.ibm.bi.dml.runtime.matrix.operators.AggregateBinaryOperator;
import com.ibm.bi.dml.runtime.matrix.operators.AggregateOperator;
import com.ibm.bi.dml.runtime.matrix.operators.AggregateUnaryOperator;
import com.ibm.bi.dml.runtime.matrix.operators.BinaryOperator;
import com.ibm.bi.dml.runtime.matrix.operators.CMOperator;
import com.ibm.bi.dml.runtime.matrix.operators.COVOperator;
import com.ibm.bi.dml.runtime.matrix.operators.Operator;
import com.ibm.bi.dml.runtime.matrix.operators.ReorgOperator;
import com.ibm.bi.dml.runtime.matrix.operators.ScalarOperator;
import com.ibm.bi.dml.runtime.matrix.operators.UnaryOperator;
import com.ibm.bi.dml.runtime.util.FastBufferedDataInputStream;
import com.ibm.bi.dml.runtime.util.UtilFunctions;
public class MatrixBlock extends MatrixValue implements Serializable
{
@SuppressWarnings("unused")
private static final String _COPYRIGHT = "Licensed Materials - Property of IBM\n(C) Copyright IBM Corp. 2010, 2015\n" +
"US Government Users Restricted Rights - Use, duplication disclosure restricted by GSA ADP Schedule Contract with IBM Corp.";
private static final long serialVersionUID = 7319972089143154056L;
//sparsity nnz threshold, based on practical experiments on space consumption and performance
public static final double SPARSITY_TURN_POINT = 0.4;
//sparsity threshold for ultra-sparse matrix operations (40nnz in a 1kx1k block)
public static final double ULTRA_SPARSITY_TURN_POINT = 0.00004;
//basic header (int rlen, int clen, byte type)
public static final int HEADER_SIZE = 9;

//block representation types (the 'byte type' of the basic header, see HEADER_SIZE)
public enum BlockType{
	EMPTY_BLOCK,
	ULTRA_SPARSE_BLOCK, //ultra sparse representation, in-mem same as sparse
	SPARSE_BLOCK, //sparse representation, see sparseRows
	DENSE_BLOCK, //dense representation, see denseBlock
}

//matrix meta data
protected int rlen = -1;         //number of rows
protected int clen = -1;         //number of columns
protected boolean sparse = true; //current representation (sparse vs dense)
protected long nonZeros = 0;     //number of non-zero values (can be recomputed, see recomputeNonZeros)

//matrix data (sparse or dense)
protected double[] denseBlock = null;    //dense row-major array, length >= rlen*clen when allocated
protected SparseRow[] sparseRows = null; //one sparse row per matrix row; entries may be null

//sparse-block-specific attributes (allocation only)
protected int estimatedNNzsPerRow = -1;

//ctable-specific attributes
protected int maxrow = -1;
protected int maxcolumn = -1;

//grpaggregate-specific attributes (optional)
protected int numGroups = -1;

//diag-specific attributes (optional)
protected boolean diag = false;
/**
 * Default constructor: creates an empty 0x0 block in sparse representation.
 */
public MatrixBlock()
{
	this(0, 0, true);
}
/**
 * Creates an rl x cl block with the requested representation (sparse if
 * sp), without allocating the underlying cell storage yet.
 */
public MatrixBlock(int rl, int cl, boolean sp)
{
	this.rlen = rl;
	this.clen = cl;
	this.sparse = sp;
	this.nonZeros = 0;
	this.maxrow = 0;
	this.maxcolumn = 0;
}
/**
 * Creates an rl x cl block as above; estnnzs is the estimated total number
 * of non-zeros, used to derive the per-row estimate for sparse row allocation.
 */
public MatrixBlock(int rl, int cl, boolean sp, long estnnzs)
{
	this(rl, cl, sp);
	estimatedNNzsPerRow=(int)Math.ceil((double)estnnzs/(double)rl);
}

/** Initializes this block as a copy of the given block (delegates to copy). */
public MatrixBlock(MatrixBlock that)
{
	this.copy(that);
}
////////
// Initialization methods
// (reset, init, allocate, etc)

/** Resets this block to an empty state, keeping the current dimensions and format. */
public void reset()
{
	//negative estimate yields estimatedNNzsPerRow < 0, i.e., unknown
	reset(-rlen);
}

/**
 * Resets this block to an empty state with the given nnz estimate.
 * Dense storage is kept and zeroed if large enough, dropped otherwise;
 * sparse rows are reset in place (see resetSparse).
 */
public void reset(long estnnzs)
{
	estimatedNNzsPerRow=(int)Math.ceil((double)estnnzs/(double)rlen);
	if(sparse)
	{
		resetSparse();
	}
	else
	{
		if(denseBlock!=null)
		{
			if(denseBlock.length<rlen*clen)
				denseBlock=null; //reallocated on demand
			else
				Arrays.fill(denseBlock, 0, rlen*clen, 0);
		}
	}
	nonZeros=0;

	//operation-specific attributes
	maxrow = rlen;
	maxcolumn = clen;
	numGroups = -1;
}

/** Resets this block to an empty rl x cl state. */
public void reset(int rl, int cl) {
	rlen=rl;
	clen=cl;
	nonZeros=0; //note: also reset within reset()
	reset();
}

/** Resets this block to an empty rl x cl state with the given nnz estimate. */
public void reset(int rl, int cl, long estnnzs) {
	rlen=rl;
	clen=cl;
	nonZeros=0;
	reset(estnnzs);
}

/** Resets this block to an empty rl x cl state in the given representation. */
public void reset(int rl, int cl, boolean sp)
{
	sparse=sp;
	reset(rl, cl);
}

/** Resets this block to an empty rl x cl state in the given representation, with nnz estimate. */
public void reset(int rl, int cl, boolean sp, long estnnzs)
{
	sparse=sp;
	reset(rl, cl, estnnzs);
}
/**
 * Resets all allocated sparse rows in place (capacity hints are taken from
 * the current per-row nnz estimate); a no-op if no rows are allocated.
 */
public void resetSparse()
{
	if( sparseRows == null )
		return;

	int rows = Math.min(rlen, sparseRows.length);
	for( int i=0; i<rows; i++ ) {
		SparseRow row = sparseRows[i];
		if( row != null )
			row.reset(estimatedNNzsPerRow, clen);
	}
}
/**
 * Resets this block to an rl x cl dense block filled with the constant v.
 * A value of 0 degenerates to a plain reset (empty dense block).
 */
public void resetDenseWithValue(int rl, int cl, double v)
	throws DMLRuntimeException
{
	estimatedNNzsPerRow=-1;
	rlen=rl;
	clen=cl;
	sparse=false;

	if(v==0)
	{
		reset();
		return;
	}

	//allocate dense block
	allocateDenseBlock();

	//init with constant value (non-zero, see above)
	int limit = rlen * clen;
	Arrays.fill(denseBlock, 0, limit, v);
	nonZeros=limit;
}
/**
 * Initializes this block with the given 2D array contents and recomputes nnz.
 * NOTE: This method is designed only for dense representation.
 *
 * @param arr source values, row-major; each row is copied in full
 * @param r number of rows to copy (must fit the current dimensions)
 * @param c number of columns to copy
 * @throws DMLRuntimeException if sparse or dimensions too large
 */
public void init(double[][] arr, int r, int c)
	throws DMLRuntimeException
{
	//input checks
	if ( sparse )
		throw new DMLRuntimeException("MatrixBlockDSM.init() can be invoked only on matrices with dense representation.");
	if( r*c > rlen*clen )
		throw new DMLRuntimeException("MatrixBlockDSM.init() invoked with too large dimensions ("+r+","+c+") vs ("+rlen+","+clen+")");

	//allocate or resize dense block
	allocateDenseBlock();

	//copy and compute nnz
	for(int i=0, ix=0; i < r; i++, ix+=clen)
		System.arraycopy(arr[i], 0, denseBlock, ix, arr[i].length);
	recomputeNonZeros();

	maxrow = r;
	maxcolumn = c;
}

/**
 * Initializes this block with the given 1D (row-major) array contents and
 * recomputes nnz.
 * NOTE: This method is designed only for dense representation.
 *
 * @param arr source values in row-major layout
 * @param r number of rows represented by arr
 * @param c number of columns represented by arr
 * @throws DMLRuntimeException if sparse or dimensions too large
 */
public void init(double[] arr, int r, int c)
	throws DMLRuntimeException
{
	//input checks
	if ( sparse )
		throw new DMLRuntimeException("MatrixBlockDSM.init() can be invoked only on matrices with dense representation.");
	if( r*c > rlen*clen )
		throw new DMLRuntimeException("MatrixBlockDSM.init() invoked with too large dimensions ("+r+","+c+") vs ("+rlen+","+clen+")");

	//allocate or resize dense block
	allocateDenseBlock();

	//copy and compute nnz
	System.arraycopy(arr, 0, denseBlock, 0, arr.length);
	recomputeNonZeros();

	maxrow = r;
	maxcolumn = c;
}

/**
 * Initializes an r x c area of this dense block with the constant val and
 * sets nnz accordingly (a val of 0 only updates max row/column).
 *
 * @param val constant cell value
 * @param r number of rows to fill
 * @param c number of columns to fill
 * @throws DMLRuntimeException if sparse or dimensions too large
 */
public void init(double val, int r, int c)
	throws DMLRuntimeException
{
	//input checks
	if ( sparse )
		throw new DMLRuntimeException("MatrixBlockDSM.init() can be invoked only on matrices with dense representation.");
	if( r*c > rlen*clen )
		throw new DMLRuntimeException("MatrixBlockDSM.init() invoked with too large dimensions ("+r+","+c+") vs ("+rlen+","+clen+")");

	if( val != 0 ) {
		//allocate or resize dense block
		allocateDenseBlock();

		if( r*c == rlen*clen ) { //FULL MATRIX INIT
			//memset value
			Arrays.fill(denseBlock, val);
		}
		else { //PARTIAL MATRIX INIT
			//rowwise memset value
			for(int i=0, ix=0; i < r; i++, ix+=clen)
				Arrays.fill(denseBlock, ix, ix+c, val);
		}

		//set non zeros to input dims
		nonZeros = r*c;
	}

	maxrow = r;
	maxcolumn = c;
}
/**
 * Indicates whether the underlying cell storage of the current
 * representation (sparse rows or dense array) has been allocated.
 *
 * @return true if the active storage exists
 */
public boolean isAllocated()
{
	return sparse ? (sparseRows != null) : (denseBlock != null);
}
/**
 * Allocates the dense block (if necessary) and clears the nnz counter.
 *
 * @throws DMLRuntimeException if the block exceeds the supported dense size
 */
public void allocateDenseBlock()
	throws DMLRuntimeException
{
	allocateDenseBlock( true );
}

/**
 * Allocates the dense block if non-existing or too small; new arrays are
 * guaranteed to be 0-initialized by the JVM.
 *
 * @param clearNNZ whether to reset the nnz counter to 0
 * @throws DMLRuntimeException if rlen*clen exceeds Integer.MAX_VALUE elements
 */
public void allocateDenseBlock(boolean clearNNZ)
	throws DMLRuntimeException
{
	long limit = (long)rlen * clen;

	//check max size constraint (16GB dense), since java arrays are limited to 2^(32-1) elements)
	if( limit > Integer.MAX_VALUE ) {
		throw new DMLRuntimeException("Dense in-memory matrix block ("+rlen+"x"+clen+") exceeds supported size of "+Integer.MAX_VALUE+" elements (16GB). " +
				                      "Please, reduce the JVM heapsize to execute this in MR.");
	}

	//allocate block if non-existing or too small (guaranteed to be 0-initialized),
	if(denseBlock == null || denseBlock.length < limit ) {
		denseBlock = new double[(int)limit];
	}

	//clear nnz if necessary
	if( clearNNZ ) {
		nonZeros = 0;
	}
}
/**
 * Allocates the sparse rows array (if necessary) and clears the nnz counter.
 */
public void allocateSparseRowsBlock()
{
	allocateSparseRowsBlock(true);
}
/**
 * Allocates or grows the sparse rows array to cover rlen rows. Existing
 * row references are preserved; new slots are null-initialized.
 *
 * @param clearNNZ whether to reset the nnz counter to 0
 */
public void allocateSparseRowsBlock(boolean clearNNZ)
{
	//allocate block if non-existing or too small
	if( sparseRows == null )
		sparseRows = new SparseRow[rlen];
	else if( sparseRows.length < rlen )
		sparseRows = Arrays.copyOf(sparseRows, rlen); //copies old refs, pads with null

	//clear nnz if necessary
	if( clearNNZ )
		nonZeros = 0;
}
/**
 * This should be called only in the read and write functions for CP
 * This function should be called before calling any setValueDenseUnsafe()
 *
 * Forces the dense representation with the given dimensions and allocates
 * the dense block.
 *
 * @param rl number of rows
 * @param cl number of columns
 * @throws DMLRuntimeException if the block exceeds the supported dense size
 */
public void allocateDenseBlockUnsafe(int rl, int cl)
	throws DMLRuntimeException
{
	sparse=false;
	rlen=rl;
	clen=cl;

	//allocate dense block
	allocateDenseBlock();
}
/**
 * Allows to cleanup all previously allocated sparserows or denseblocks.
 * This is for example required in reading a matrix with many empty blocks
 * via distributed cache into in-memory list of blocks - not cleaning blocks
 * from non-empty blocks would significantly increase the total memory consumption.
 *
 * @param dense whether to drop the dense block
 * @param sparse whether to drop the sparse rows array
 */
public void cleanupBlock( boolean dense, boolean sparse )
{
	if(dense)
		denseBlock = null;
	if(sparse)
		sparseRows = null;
}
////////
// Metadata information

/** Returns the number of rows of this block. */
public int getNumRows()
{
	return rlen;
}

/**
 * NOTE: setNumRows() and setNumColumns() are used only in tertiaryInstruction (for contingency tables)
 * and pmm for meta corrections. They do not resize the underlying data.
 *
 * @param r new number of rows
 */
public void setNumRows(int r)
{
	rlen = r;
}

/** Returns the number of columns of this block. */
public int getNumColumns()
{
	return clen;
}

/** Sets the number of columns (meta data only, see setNumRows). */
public void setNumColumns(int c)
{
	clen = c;
}

/** Returns the current non-zero counter (not recomputed, see recomputeNonZeros). */
public long getNonZeros()
{
	return nonZeros;
}

/** Returns true if this block is a row or column vector (1 x n or n x 1). */
public boolean isVector()
{
	return (rlen == 1 || clen == 1);
}
/**
 * Return the maximum row encountered WITHIN the current block
 * (tracked for sparse blocks only; dense blocks report their full row count).
 */
public int getMaxRow()
{
	return sparse ? maxrow : getNumRows();
}
/** Sets the maximum row encountered within the current block (see getMaxRow). */
public void setMaxRow(int r)
{
	maxrow = r;
}
/**
 * Return the maximum column encountered WITHIN the current block
 * (tracked for sparse blocks only; dense blocks report their full column count).
 */
public int getMaxColumn()
{
	return sparse ? maxcolumn : getNumColumns();
}
/** Sets the maximum column encountered within the current block (see getMaxColumn). */
public void setMaxColumn(int c)
{
	maxcolumn = c;
}

/** Returns true if this block holds no non-zero values (unsafe check, nnz not recomputed). */
@Override
public boolean isEmpty()
{
	return isEmptyBlock(false);
}

/** Returns true if this block holds no non-zero values (safe check, recomputes nnz if 0). */
public boolean isEmptyBlock()
{
	return isEmptyBlock(true);
}
/**
 * Returns true if this block holds no non-zero values. A block without
 * allocated storage is trivially empty. An nnz count of 0 may be stale
 * (under-estimated); in safe mode it is recomputed before deciding.
 *
 * @param safe whether to recompute nnz when the counter reads 0
 */
public boolean isEmptyBlock(boolean safe)
{
	//no underlying storage allocated -> trivially empty
	boolean ret = sparse ? (sparseRows == null) : (denseBlock == null);

	//prevent under-estimation of a zero counter
	if( nonZeros == 0 ) {
		if( safe )
			recomputeNonZeros();
		ret = (nonZeros == 0);
	}

	return ret;
}
/** Marks this block as a diagonal matrix (optional hint for downstream operations). */
public void setDiag()
{
	diag = true;
}

/** Returns whether this block has been marked as diagonal (see setDiag). */
public boolean isDiag()
{
	return diag;
}
////////
// Data handling
/** Returns the internal dense array, or null if this block is in sparse format. */
public double[] getDenseArray()
{
	return sparse ? null : denseBlock;
}
/** Returns the internal sparse rows array, or null if this block is in dense format. */
public SparseRow[] getSparseRows()
{
	return sparse ? sparseRows : null;
}
/** Returns an iterator over the sparse rows of this block; only valid for sparse format. */
public SparseRowsIterator getSparseRowsIterator()
{
	//check for valid format, should have been checked from outside
	if( !sparse )
		throw new RuntimeException("getSparseCellInterator should not be called for dense format");

	return new SparseRowsIterator(rlen, sparseRows);
}

/** Returns an iterator over the sparse rows [rowStart, rowStart+rowNum); only valid for sparse format. */
public SparseRowsIterator getSparseRowsIterator(int rowStart, int rowNum)
{
	//check for valid format, should have been checked from outside
	if( !sparse )
		throw new RuntimeException("getSparseCellInterator should not be called for dense format");

	return new SparseRowsIterator(rowStart, rowStart+rowNum, sparseRows);
}
/**
 * Appends all rlen*clen cell values (including zeros) to the given
 * collection. For sparse blocks, the stored values are appended first and
 * the remaining cells are padded with 0.0.
 *
 * NOTE(review): the sparse zero-padding uses ret.size(), which assumes
 * ret is empty on entry — confirm callers never pass a pre-filled collection.
 */
@Override
public void getCellValues(Collection<Double> ret)
{
	int limit=rlen*clen;
	if(sparse)
	{
		if(sparseRows==null)
		{
			for(int i=0; i<limit; i++)
				ret.add(0.0);
		}else
		{
			for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
			{
				if(sparseRows[r]==null)
					continue;
				double[] container=sparseRows[r].getValueContainer();
				for(int j=0; j<sparseRows[r].size(); j++)
					ret.add(container[j]);
			}
			//pad remaining (non-stored) cells with zeros
			int zeros=limit-ret.size();
			for(int i=0; i<zeros; i++)
				ret.add(0.0);
		}
	}else
	{
		if(denseBlock==null)
		{
			for(int i=0; i<limit; i++)
				ret.add(0.0);
		}else
		{
			for(int i=0; i<limit; i++)
				ret.add(denseBlock[i]);
		}
	}
}
/**
 * Accumulates a histogram of cell values into ret (value -> occurrence
 * count), including zero cells: for sparse blocks the zero count is
 * derived from the number of non-stored cells rather than enumerated.
 *
 * Bug fix: the sparse branch previously computed the number of zero cells
 * as limit - ret.size(), i.e., it subtracted the number of DISTINCT values
 * instead of the number of stored cells, over-counting zeros whenever a
 * value occurs more than once. We now count the stored cells explicitly.
 */
@Override
public void getCellValues(Map<Double, Integer> ret)
{
	int limit=rlen*clen;
	if(sparse)
	{
		if(sparseRows==null)
		{
			ret.put(0.0, limit);
		}else
		{
			int count=0; //number of stored (explicit) cell values
			for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
			{
				if(sparseRows[r]==null)
					continue;
				double[] container=sparseRows[r].getValueContainer();
				for(int j=0; j<sparseRows[r].size(); j++)
				{
					Double v=container[j];
					Integer old=ret.get(v);
					ret.put(v, (old!=null) ? old+1 : 1);
					count++;
				}
			}
			//all remaining (non-stored) cells are zero
			int zeros=limit-count;
			Integer old=ret.get(0.0);
			ret.put(0.0, (old!=null) ? old+zeros : zeros);
		}
	}else
	{
		if(denseBlock==null)
		{
			ret.put(0.0, limit);
		}else
		{
			for(int i=0; i<limit; i++)
			{
				double v=denseBlock[i];
				Integer old=ret.get(v);
				ret.put(v, (old!=null) ? old+1 : 1);
			}
		}
	}
}
/**
 * Returns the value of cell (r,c) with bounds checking; non-materialized
 * cells read as 0.
 *
 * Bug fix: the previous bounds check used 'r>rlen || c>clen', which let
 * the out-of-range boundary indexes r==rlen / c==clen (and negative
 * indexes) slip through, causing wrong reads on dense blocks or a silent 0
 * on sparse blocks. Valid indexes are 0..rlen-1 and 0..clen-1.
 */
@Override
public double getValue(int r, int c)
{
	//validate bounds (0-based indexes)
	if( r < 0 || r >= rlen || c < 0 || c >= clen )
		throw new RuntimeException("indexes ("+r+","+c+") out of range ("+rlen+","+clen+")");

	if(sparse)
	{
		if(sparseRows==null || sparseRows.length<=r || sparseRows[r]==null)
			return 0;
		return sparseRows[r].get(c);
	}
	else
	{
		if(denseBlock==null)
			return 0;
		return denseBlock[r*clen+c];
	}
}
/**
 * Sets cell (r,c) to v with bounds checking, maintaining the nnz counter.
 * Storage is allocated on demand; writing 0 into a non-materialized
 * position is a no-op.
 *
 * Bug fix: the previous bounds check used 'r>rlen || c>clen', which let
 * the out-of-range boundary indexes r==rlen / c==clen (and negative
 * indexes) slip through. Valid indexes are 0..rlen-1 and 0..clen-1.
 */
@Override
public void setValue(int r, int c, double v)
{
	//validate bounds (0-based indexes)
	if( r < 0 || r >= rlen || c < 0 || c >= clen )
		throw new RuntimeException("indexes ("+r+","+c+") out of range ("+rlen+","+clen+")");

	if(sparse)
	{
		//early abort: writing 0 into a non-materialized cell is a no-op
		if( (sparseRows==null || sparseRows.length<=r || sparseRows[r]==null) && v==0.0)
			return;

		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);

		if(sparseRows[r].set(c, v))
			nonZeros++;
	}
	else
	{
		if(denseBlock==null && v==0.0)
			return;

		//allocate and init dense block (w/o overwriting nnz)
		try {
			allocateDenseBlock(false);
		}
		catch(DMLRuntimeException e){
			throw new RuntimeException(e);
		}

		int index=r*clen+c;
		if(denseBlock[index]==0)
			nonZeros++;
		denseBlock[index]=v;
		if(v==0)
			nonZeros--;
	}
}
/** Sets the cell addressed by the given index (delegates to setValue(r,c,v) incl. its checks). */
@Override
public void setValue(CellIndex index, double v)
{
	setValue(index.row, index.column, v);
}
@Override
/**
 * If (r,c) \in Block, add v to existing cell
 * If not, add a new cell with index (r,c).
 *
 * This function intentionally avoids the maintenance of NNZ for efficiency.
 * NOTE(review): callers are responsible for recomputing non-zeros afterwards.
 */
public void addValue(int r, int c, double v) {
	if(sparse)
	{
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);
		double curV=sparseRows[r].get(c);
		curV+=v;
		sparseRows[r].set(c, curV);
	}
	else
	{
		//allocate and init dense block (w/o overwriting nnz)
		try {
			allocateDenseBlock(false);
		}
		catch(DMLRuntimeException e){
			throw new RuntimeException(e);
		}

		int index=r*clen+c;
		denseBlock[index]+=v;
	}
}
/**
 * Reads the value of cell (r,c) without any bounds checking.
 * Cells that are not physically materialized read as 0.
 */
public double quickGetValue(int r, int c)
{
	if( !sparse ) {
		//dense representation: unallocated block is all-zero
		return (denseBlock != null) ? denseBlock[r*clen+c] : 0;
	}

	//sparse representation: missing row is all-zero
	boolean hasRow = (sparseRows != null && sparseRows.length > r && sparseRows[r] != null);
	return hasRow ? sparseRows[r].get(c) : 0;
}
/**
 * Sets cell (r,c) to value v without bounds checking, allocating the
 * underlying sparse/dense block on demand and maintaining nonZeros.
 * Writing 0 into a non-materialized cell is a no-op.
 */
public void quickSetValue(int r, int c, double v)
{
	if(sparse)
	{
		//early abort: writing 0 into a non-materialized cell changes nothing
		if( (sparseRows==null || sparseRows.length<=r || sparseRows[r]==null) && v==0.0)
			return;

		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);

		//presumably SparseRow.set returns true iff a new entry was inserted,
		//hence the nnz increment — TODO confirm against SparseRow
		if(sparseRows[r].set(c, v))
			nonZeros++;
	}
	else
	{
		if(denseBlock==null && v==0.0)
			return;

		//allocate and init dense block (w/o overwriting nnz)
		try {
			allocateDenseBlock(false);
		}
		catch(DMLRuntimeException e){
			throw new RuntimeException(e);
		}

		//maintain nnz: +1 if a zero cell becomes set, -1 if v itself is zero
		int index=r*clen+c;
		if(denseBlock[index]==0)
			nonZeros++;
		denseBlock[index]=v;
		if(v==0)
			nonZeros--;
	}
}
/**
 * Dense getter without allocation or bounds checks; an unallocated
 * dense block reads as 0 for every cell.
 */
public double getValueDenseUnsafe(int r, int c)
{
	return (denseBlock == null) ? 0 : denseBlock[r*clen+c];
}
/**
 * Dense setter without allocation, bounds checks, or nnz maintenance.
 * May only be called when the dense block is properly allocated and
 * (r,c) lies within the block dimensions; the caller is responsible
 * for recomputing nonZeros afterwards.
 */
public void setValueDenseUnsafe(int r, int c, double v)
{
	denseBlock[r*clen+c]=v;
}
/**
 * Sparse getter without bounds checks; rows that are not materialized
 * read as 0 for every column.
 */
public double getValueSparseUnsafe(int r, int c)
{
	SparseRow arow = (sparseRows != null && r < sparseRows.length) ? sparseRows[r] : null;
	return (arow == null) ? 0 : arow.get(c);
}
/**
 * Append value is only used when values are appended at the end of each row for the sparse representation.
 * This can only be called when the caller knows the access pattern of the block,
 * i.e., values arrive in increasing column order per row. Zero values are
 * ignored; for dense blocks this falls back to quickSetValue.
 * @param r row index, 0-based
 * @param c column index, 0-based
 * @param v value to append (ignored if 0)
 */
public void appendValue(int r, int c, double v)
{
	//zeros are never physically stored
	if(v==0) return;
	if(!sparse)
		quickSetValue(r, c, v);
	else
	{
		//allocation on demand
		allocateSparseRowsBlock(false);
		if(sparseRows[r]==null)
			sparseRows[r]=new SparseRow(estimatedNNzsPerRow, clen);

		sparseRows[r].append(c, v);
		nonZeros++;
	}
}
public void appendRow(int r, SparseRow values)
{
if(values==null)
return;
if(sparse)
{
//allocation on demand
allocateSparseRowsBlock(false);
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow(values);
else
sparseRows[r].copy(values);
nonZeros+=values.size();
}else
{
int[] cols=values.getIndexContainer();
double[] vals=values.getValueContainer();
for(int i=0; i<values.size(); i++)
quickSetValue(r, cols[i], vals[i]);
}
}
/**
*
* @param that
* @param rowoffset
* @param coloffset
*/
public void appendToSparse( MatrixBlock that, int rowoffset, int coloffset )
{
if( that==null || that.isEmptyBlock(false) )
return; //nothing to append
//init sparse rows if necessary
allocateSparseRowsBlock(false);
if( that.sparse ) //SPARSE <- SPARSE
{
for( int i=0; i<that.rlen; i++ )
{
SparseRow brow = that.sparseRows[i];
if( brow!=null && !brow.isEmpty() )
{
int aix = rowoffset+i;
int len = brow.size();
int[] ix = brow.getIndexContainer();
double[] val = brow.getValueContainer();
if( sparseRows[aix]==null )
sparseRows[aix] = new SparseRow(estimatedNNzsPerRow,clen);
for( int j=0; j<len; j++ )
sparseRows[aix].append(coloffset+ix[j], val[j]);
}
}
}
else //SPARSE <- DENSE
{
for( int i=0; i<that.rlen; i++ )
{
int aix = rowoffset+i;
for( int j=0, bix=i*that.clen; j<that.clen; j++ )
{
double val = that.denseBlock[bix+j];
if( val != 0 )
{
if( sparseRows[aix]==null )//create sparserow only if required
sparseRows[aix] = new SparseRow(estimatedNNzsPerRow,clen);
sparseRows[aix].append(coloffset+j, val);
}
}
}
}
}
/**
 * Sorts the column indexes of all sparse rows in place. This is a no-op
 * for dense or unallocated blocks, and rows with fewer than two entries
 * are skipped (already trivially sorted).
 */
public void sortSparseRows()
{
	if( !sparse || sparseRows==null )
		return;

	for( int i=0; i<sparseRows.length; i++ ) {
		SparseRow arow = sparseRows[i];
		if( arow != null && arow.size() > 1 )
			arow.sort();
	}
}
/**
 * Utility function for computing the min non-zero value.
 *
 * @return the minimum non-zero value, or -1 if the block is empty
 *         (i.e., contains no non-zero values)
 * @throws DMLRuntimeException
 */
public double minNonZero()
	throws DMLRuntimeException
{
	//check for empty block and return immediately
	if( isEmptyBlock() )
		return -1;

	//NOTE: usually this method is only applied on dense vectors and hence not really tuned yet.
	double min = Double.MAX_VALUE;
	for( int i=0; i<rlen; i++ )
		for( int j=0; j<clen; j++ ){
			double val = quickGetValue(i, j);
			if( val != 0 )
				min = Math.min(min, val);
		}

	return min;
}
/**
* Wrapper method for reduceall-min of a matrix.
*
* @return
* @throws DMLRuntimeException
*/
public double min()
throws DMLRuntimeException
{
//construct operator
AggregateOperator aop = new AggregateOperator(Double.MAX_VALUE, Builtin.getBuiltinFnObject("min"));
AggregateUnaryOperator auop = new AggregateUnaryOperator( aop, ReduceAll.getReduceAllFnObject());
//execute operation
MatrixBlock out = new MatrixBlock(1, 1, false);
LibMatrixAgg.aggregateUnaryMatrix(this, out, auop);
return out.quickGetValue(0, 0);
}
/**
* Wrapper method for reduceall-max of a matrix.
*
* @return
* @throws DMLRuntimeException
*/
public double max()
throws DMLRuntimeException
{
//construct operator
AggregateOperator aop = new AggregateOperator(-Double.MAX_VALUE, Builtin.getBuiltinFnObject("max"));
AggregateUnaryOperator auop = new AggregateUnaryOperator( aop, ReduceAll.getReduceAllFnObject());
//execute operation
MatrixBlock out = new MatrixBlock(1, 1, false);
LibMatrixAgg.aggregateUnaryMatrix(this, out, auop);
return out.quickGetValue(0, 0);
}
////////
// sparsity handling functions
/**
 * Returns the current representation (true for sparse, false for dense).
 * This only reflects the in-memory layout flag; it does not inspect or
 * change the underlying data.
 */
public boolean isInSparseFormat()
{
	return sparse;
}
/**
 * Indicates if this block qualifies as ultra-sparse: it is in sparse
 * representation, its sparsity is below ULTRA_SPARSITY_TURN_POINT, and
 * it holds fewer than 40 non-zeros.
 *
 * @return true if the block is ultra-sparse
 */
public boolean isUltraSparse()
{
	//nested division avoids overflow of rlen*clen for large dimensions
	double sp = ((double)nonZeros/rlen)/clen;
	//check for sparse representation in order to account for vectors in dense
	return sparse && sp<ULTRA_SPARSITY_TURN_POINT && nonZeros<40;
}
/**
* Evaluates if this matrix block should be in sparse format in
* memory. Note that this call does not change the representation -
* for this please call examSparsity.
*
* @return
*/
public boolean evalSparseFormatInMemory()
{
long lrlen = (long) rlen;
long lclen = (long) clen;
long lnonZeros = (long) nonZeros;
//ensure exact size estimates for write
if( lnonZeros<=0 ) {
recomputeNonZeros();
lnonZeros = (long) nonZeros;
}
//decide on in-memory representation
return evalSparseFormatInMemory(lrlen, lclen, lnonZeros);
}
private boolean evalSparseFormatInMemory(boolean transpose)
{
int lrlen = (transpose) ? clen : rlen;
int lclen = (transpose) ? rlen : clen;
long lnonZeros = (long) nonZeros;
//ensure exact size estimates for write
if( lnonZeros<=0 ) {
recomputeNonZeros();
lnonZeros = (long) nonZeros;
}
//decide on in-memory representation
return evalSparseFormatInMemory(lrlen, lclen, lnonZeros);
}
/**
* Evaluates if this matrix block should be in sparse format on
* disk. This applies to any serialized matrix representation, i.e.,
* when writing to in-memory buffer pool pages or writing to local fs
* or hdfs.
*
* @return
*/
public boolean evalSparseFormatOnDisk()
{
long lrlen = (long) rlen;
long lclen = (long) clen;
//ensure exact size estimates for write
if( nonZeros <= 0 ) {
recomputeNonZeros();
}
//decide on in-memory representation
return evalSparseFormatOnDisk(lrlen, lclen, nonZeros);
}
/**
* Evaluates if this matrix block should be in sparse format in
* memory. Depending on the current representation, the state of the
* matrix block is changed to the right representation if necessary.
* Note that this consumes for the time of execution memory for both
* representations.
*
* @throws DMLRuntimeException
*/
public void examSparsity()
throws DMLRuntimeException
{
//determine target representation
boolean sparseDst = evalSparseFormatInMemory();
//check for empty blocks (e.g., sparse-sparse)
if( isEmptyBlock(false) )
cleanupBlock(true, true);
//change representation if required (also done for
//empty blocks in order to set representation flags)
if( sparse && !sparseDst)
sparseToDense();
else if( !sparse && sparseDst )
denseToSparse();
}
/**
 * Evaluates if a matrix block with the given characteristics should be in sparse format
 * in memory.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return true if the sparse in-memory representation is preferable
 */
public static boolean evalSparseFormatInMemory( final long nrows, final long ncols, final long nnz )
{
	//evaluate sparsity threshold
	double lsparsity = (double)nnz/nrows/ncols;
	boolean lsparse = (lsparsity < SPARSITY_TURN_POINT);

	//compare size of sparse and dense representation in order to prevent
	//that the sparse size exceed the dense size since we use the dense size
	//as worst-case estimate if unknown (and it requires less io from
	//main memory).
	double sizeSparse = estimateSizeSparseInMemory(nrows, ncols, lsparsity);
	double sizeDense = estimateSizeDenseInMemory(nrows, ncols);

	return lsparse && (sizeSparse<sizeDense);
}
/**
 * Evaluates if a matrix block with the given characteristics should be in sparse format
 * on disk (or in any other serialized representation).
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return true if a sparse (or ultra-sparse) on-disk representation is preferable
 */
public static boolean evalSparseFormatOnDisk( final long nrows, final long ncols, final long nnz )
{
	//evaluate sparsity threshold
	double lsparsity = ((double)nnz/nrows)/ncols;
	boolean lsparse = (lsparsity < SPARSITY_TURN_POINT);

	//also consider the ultra-sparse encoding as an alternative to dense
	double sizeUltraSparse = estimateSizeUltraSparseOnDisk( nrows, ncols, nnz );
	double sizeSparse = estimateSizeSparseOnDisk(nrows, ncols, nnz);
	double sizeDense = estimateSizeDenseOnDisk(nrows, ncols);

	return lsparse && (sizeSparse<sizeDense || sizeUltraSparse<sizeDense);
}
////////
// basic block handling functions
/**
*
*/
private void denseToSparse()
{
//set target representation
sparse = true;
//early abort on empty blocks
if(denseBlock==null)
return;
//allocate sparse target block (reset requires to maintain nnz again)
allocateSparseRowsBlock();
reset();
//copy dense to sparse
double[] a = denseBlock;
SparseRow[] c = sparseRows;
for( int i=0, aix=0; i<rlen; i++ )
for(int j=0; j<clen; j++, aix++)
if( a[aix] != 0 ) {
if( c[i]==null ) //create sparse row only if required
c[i]=new SparseRow(estimatedNNzsPerRow, clen);
c[i].append(j, a[aix]);
nonZeros++;
}
//cleanup dense block
denseBlock = null;
}
/**
*
* @throws DMLRuntimeException
*/
private void sparseToDense()
throws DMLRuntimeException
{
//set target representation
sparse = false;
//early abort on empty blocks
if(sparseRows==null)
return;
int limit=rlen*clen;
if ( limit < 0 ) {
throw new DMLRuntimeException("Unexpected error in sparseToDense().. limit < 0: " + rlen + ", " + clen + ", " + limit);
}
//allocate dense target block, but keep nnz (no need to maintain)
allocateDenseBlock(false);
Arrays.fill(denseBlock, 0, limit, 0);
//copy sparse to dense
SparseRow[] a = sparseRows;
double[] c = denseBlock;
for( int i=0, cix=0; i<rlen; i++, cix+=clen)
if( a[i] != null && !a[i].isEmpty() ) {
int alen = a[i].size();
int[] aix = a[i].getIndexContainer();
double[] avals = a[i].getValueContainer();
for(int j=0; j<alen; j++)
if( avals[j] != 0 )
c[ cix+aix[j] ] = avals[j];
}
//cleanup sparse rows
sparseRows = null;
}
public void recomputeNonZeros()
{
nonZeros=0;
if( sparse && sparseRows!=null )
{
int limit = Math.min(rlen, sparseRows.length);
for(int i=0; i<limit; i++)
if(sparseRows[i]!=null)
nonZeros += sparseRows[i].size();
}
else if( !sparse && denseBlock!=null )
{
int limit=rlen*clen;
for(int i=0; i<limit; i++)
{
//HotSpot JVM bug causes crash in presence of NaNs
//nonZeros += (denseBlock[i]!=0) ? 1 : 0;
if( denseBlock[i]!=0 )
nonZeros++;
}
}
}
/**
 * Recomputes the number of non-zero values within the index range
 * [rl,ru] x [cl,cu] (all bounds inclusive) without modifying any state.
 *
 * @param rl row lower index, inclusive
 * @param ru row upper index, inclusive
 * @param cl column lower index, inclusive
 * @param cu column upper index, inclusive
 * @return the number of non-zeros in the given range
 */
private long recomputeNonZeros(int rl, int ru, int cl, int cu)
{
	long nnz = 0;
	if(sparse)
	{
		if(sparseRows!=null)
		{
			int rlimit = Math.min( ru+1, Math.min(rlen, sparseRows.length) );
			if( cl==0 && cu==clen-1 ) //specific case: all cols
			{
				for(int i=rl; i<rlimit; i++)
					if(sparseRows[i]!=null && !sparseRows[i].isEmpty())
						nnz+=sparseRows[i].size();
			}
			else if( cl==cu ) //specific case: one column
			{
				for(int i=rl; i<rlimit; i++)
					if(sparseRows[i]!=null && !sparseRows[i].isEmpty())
						nnz += (sparseRows[i].get(cl)!=0) ? 1 : 0;
			}
			else //general case
			{
				int astart,aend;
				for(int i=rl; i<rlimit; i++)
					if(sparseRows[i]!=null && !sparseRows[i].isEmpty())
					{
						SparseRow arow = sparseRows[i];
						astart = arow.searchIndexesFirstGTE(cl);
						//NOTE(review): aend is the first index >= cu, yet the count
						//below assumes it is the last index <= cu; if cu is absent
						//from the row or no index >= cu exists (aend==-1), the
						//(aend-astart+1) term looks off — verify against the
						//SparseRow.searchIndexesFirstGTE contract.
						aend = arow.searchIndexesFirstGTE(cu);
						nnz += (astart!=-1) ? (aend-astart+1) : 0;
					}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			for( int i=rl, ix=rl*clen; i<=ru; i++, ix+=clen )
				for( int j=cl; j<=cu; j++ )
				{
					//HotSpot JVM bug causes crash in presence of NaNs
					//nnz += (denseBlock[ix+j]!=0) ? 1 : 0;
					if( denseBlock[ix+j]!=0 )
						nnz++;
				}
		}
	}
	return nnz;
}
public void copy(MatrixValue thatValue)
{
MatrixBlock that;
try {
that = checkType(thatValue);
} catch (DMLUnsupportedOperationException e) {
throw new RuntimeException(e);
}
if( this == that ) //prevent data loss (e.g., on sparse-dense conversion)
throw new RuntimeException( "Copy must not overwrite itself!" );
this.rlen = that.rlen;
this.clen = that.clen;
this.sparse = that.evalSparseFormatInMemory();
estimatedNNzsPerRow=(int)Math.ceil((double)thatValue.getNonZeros()/(double)rlen);
if(this.sparse && that.sparse)
copySparseToSparse(that);
else if(this.sparse && !that.sparse)
copyDenseToSparse(that);
else if(!this.sparse && that.sparse)
copySparseToDense(that);
else
copyDenseToDense(that);
}
public void copy(MatrixValue thatValue, boolean sp) {
MatrixBlock that;
try {
that = checkType(thatValue);
} catch (DMLUnsupportedOperationException e) {
throw new RuntimeException(e);
}
if( this == that ) //prevent data loss (e.g., on sparse-dense conversion)
throw new RuntimeException( "Copy must not overwrite itself!" );
this.rlen=that.rlen;
this.clen=that.clen;
this.sparse=sp;
estimatedNNzsPerRow=(int)Math.ceil((double)thatValue.getNonZeros()/(double)rlen);
if(this.sparse && that.sparse)
copySparseToSparse(that);
else if(this.sparse && !that.sparse)
copyDenseToSparse(that);
else if(!this.sparse && that.sparse)
copySparseToDense(that);
else
copyDenseToDense(that);
}
private void copySparseToSparse(MatrixBlock that)
{
this.nonZeros=that.nonZeros;
if( that.isEmptyBlock(false) )
{
resetSparse();
return;
}
allocateSparseRowsBlock(false);
for(int i=0; i<Math.min(that.sparseRows.length, rlen); i++)
{
if(that.sparseRows[i]!=null)
{
if(sparseRows[i]==null)
sparseRows[i]=new SparseRow(that.sparseRows[i]);
else
sparseRows[i].copy(that.sparseRows[i]);
}else if(this.sparseRows[i]!=null)
this.sparseRows[i].reset(estimatedNNzsPerRow, clen);
}
}
private void copyDenseToDense(MatrixBlock that)
{
nonZeros = that.nonZeros;
int limit = rlen*clen;
//plain reset to 0 for empty input
if( that.isEmptyBlock(false) )
{
if(denseBlock!=null)
Arrays.fill(denseBlock, 0, limit, 0);
return;
}
//allocate and init dense block (w/o overwriting nnz)
try {
allocateDenseBlock(false);
}
catch(DMLRuntimeException e){
throw new RuntimeException(e);
}
//actual copy
System.arraycopy(that.denseBlock, 0, denseBlock, 0, limit);
}
private void copySparseToDense(MatrixBlock that)
{
this.nonZeros=that.nonZeros;
if( that.isEmptyBlock(false) )
{
if(denseBlock!=null)
Arrays.fill(denseBlock, 0);
return;
}
//allocate and init dense block (w/o overwriting nnz)
try {
allocateDenseBlock(false);
}
catch(DMLRuntimeException e){
throw new RuntimeException(e);
}
int start=0;
for(int r=0; r<Math.min(that.sparseRows.length, rlen); r++, start+=clen)
{
if(that.sparseRows[r]==null)
continue;
double[] values=that.sparseRows[r].getValueContainer();
int[] cols=that.sparseRows[r].getIndexContainer();
for(int i=0; i<that.sparseRows[r].size(); i++)
{
denseBlock[start+cols[i]]=values[i];
}
}
}
private void copyDenseToSparse(MatrixBlock that)
{
nonZeros = that.nonZeros;
if( that.isEmptyBlock(false) )
{
resetSparse();
return;
}
allocateSparseRowsBlock(false);
for(int i=0, ix=0; i<rlen; i++)
{
if( sparseRows[i]!=null )
sparseRows[i].reset(estimatedNNzsPerRow, clen);
for(int j=0; j<clen; j++)
{
double val = that.denseBlock[ix++];
if( val != 0 )
{
if(sparseRows[i]==null) //create sparse row only if required
sparseRows[i]=new SparseRow(estimatedNNzsPerRow, clen);
sparseRows[i].append(j, val);
}
}
}
}
/**
* In-place copy of matrix src into the index range of the existing current matrix.
* Note that removal of existing nnz in the index range and nnz maintenance is
* only done if 'awareDestNZ=true',
*
* @param rl
* @param ru
* @param cl
* @param cu
* @param src
* @param awareDestNZ
* true, forces (1) to remove existing non-zeros in the index range of the
* destination if not present in src and (2) to internally maintain nnz
* false, assume empty index range in destination and do not maintain nnz
* (the invoker is responsible to recompute nnz after all copies are done)
* @throws DMLRuntimeException
*/
public void copy(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ )
throws DMLRuntimeException
{
if(sparse && src.sparse)
copySparseToSparse(rl, ru, cl, cu, src, awareDestNZ);
else if(sparse && !src.sparse)
copyDenseToSparse(rl, ru, cl, cu, src, awareDestNZ);
else if(!sparse && src.sparse)
copySparseToDense(rl, ru, cl, cu, src, awareDestNZ);
else
copyDenseToDense(rl, ru, cl, cu, src, awareDestNZ);
}
private void copySparseToSparse(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
{
//handle empty src and dest
if( src.isEmptyBlock(false) )
{
if( awareDestNZ && sparseRows != null )
copyEmptyToSparse(rl, ru, cl, cu, true);
return;
}
if(sparseRows==null)
sparseRows=new SparseRow[rlen];
else if( awareDestNZ )
{
copyEmptyToSparse(rl, ru, cl, cu, true);
//explicit clear if awareDestNZ because more efficient since
//src will have multiple columns and only few overwriting values
}
//copy values
int alen;
int[] aix;
double[] avals;
for( int i=0; i<src.rlen; i++ )
{
SparseRow arow = src.sparseRows[i];
if( arow != null && !arow.isEmpty() )
{
alen = arow.size();
aix = arow.getIndexContainer();
avals = arow.getValueContainer();
if( sparseRows[rl+i] == null || sparseRows[rl+i].isEmpty() )
{
sparseRows[rl+i] = new SparseRow(estimatedNNzsPerRow, clen);
SparseRow brow = sparseRows[rl+i];
for( int j=0; j<alen; j++ )
brow.append(cl+aix[j], avals[j]);
if( awareDestNZ )
nonZeros += brow.size();
}
else if( awareDestNZ ) //general case (w/ awareness NNZ)
{
SparseRow brow = sparseRows[rl+i];
int lnnz = brow.size();
if( cl==cu && cl==aix[0] )
{
if (avals[0]==0)
brow.deleteIndex(cl);
else
brow.set(cl, avals[0] );
}
else
{
brow.deleteIndexRange(cl, cu);
for( int j=0; j<alen; j++ )
brow.set(cl+aix[j], avals[j]);
}
nonZeros += (brow.size() - lnnz);
}
else //general case (w/o awareness NNZ)
{
SparseRow brow = sparseRows[rl+i];
//brow.set(cl, arow);
for( int j=0; j<alen; j++ )
brow.set(cl+aix[j], avals[j]);
}
}
}
}
private void copySparseToDense(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
throws DMLRuntimeException
{
//handle empty src and dest
if( src.isEmptyBlock(false) )
{
if( awareDestNZ && denseBlock != null ) {
nonZeros -= recomputeNonZeros(rl, ru, cl, cu);
copyEmptyToDense(rl, ru, cl, cu);
}
return;
}
if(denseBlock==null)
allocateDenseBlock();
else if( awareDestNZ )
{
nonZeros -= recomputeNonZeros(rl, ru, cl, cu);
copyEmptyToDense(rl, ru, cl, cu);
}
//copy values
int alen;
int[] aix;
double[] avals;
for( int i=0, ix=rl*clen; i<src.rlen; i++, ix+=clen )
{
SparseRow arow = src.sparseRows[i];
if( arow != null && !arow.isEmpty() )
{
alen = arow.size();
aix = arow.getIndexContainer();
avals = arow.getValueContainer();
for( int j=0; j<alen; j++ )
denseBlock[ix+cl+aix[j]] = avals[j];
if(awareDestNZ)
nonZeros += alen;
}
}
}
private void copyDenseToSparse(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
{
//handle empty src and dest
if( src.isEmptyBlock(false) )
{
if( awareDestNZ && sparseRows != null )
copyEmptyToSparse(rl, ru, cl, cu, true);
return;
}
if(sparseRows==null)
sparseRows=new SparseRow[rlen];
//no need to clear for awareDestNZ since overwritten
//copy values
double val;
for( int i=0, ix=0; i<src.rlen; i++, ix+=src.clen )
{
int rix = rl + i;
if( sparseRows[rix]==null || sparseRows[rix].isEmpty() )
{
for( int j=0; j<src.clen; j++ )
if( (val = src.denseBlock[ix+j]) != 0 )
{
if( sparseRows[rix]==null )
sparseRows[rix] = new SparseRow(estimatedNNzsPerRow, clen);
sparseRows[rix].append(cl+j, val);
}
if( awareDestNZ && sparseRows[rix]!=null )
nonZeros += sparseRows[rix].size();
}
else if( awareDestNZ ) //general case (w/ awareness NNZ)
{
SparseRow brow = sparseRows[rix];
int lnnz = brow.size();
if( cl==cu )
{
if ((val = src.denseBlock[ix])==0)
brow.deleteIndex(cl);
else
brow.set(cl, val);
}
else
{
brow.deleteIndexRange(cl, cu);
for( int j=0; j<src.clen; j++ )
if( (val = src.denseBlock[ix+j]) != 0 )
brow.set(cl+j, val);
}
nonZeros += (brow.size() - lnnz);
}
else //general case (w/o awareness NNZ)
{
SparseRow brow = sparseRows[rix];
for( int j=0; j<src.clen; j++ )
if( (val = src.denseBlock[ix+j]) != 0 )
brow.set(cl+j, val);
}
}
}
private void copyDenseToDense(int rl, int ru, int cl, int cu, MatrixBlock src, boolean awareDestNZ)
throws DMLRuntimeException
{
//handle empty src and dest
if( src.isEmptyBlock(false) )
{
if( awareDestNZ && denseBlock != null ) {
nonZeros -= recomputeNonZeros(rl, ru, cl, cu);
copyEmptyToDense(rl, ru, cl, cu);
}
return;
}
if(denseBlock==null)
allocateDenseBlock();
//no need to clear for awareDestNZ since overwritten
if( awareDestNZ )
nonZeros = nonZeros - recomputeNonZeros(rl, ru, cl, cu) + src.nonZeros;
//copy values
int rowLen = cu-cl+1;
if(clen == src.clen) //optimization for equal width
System.arraycopy(src.denseBlock, 0, denseBlock, rl*clen+cl, src.rlen*src.clen);
else
for( int i=0, ix1=0, ix2=rl*clen+cl; i<src.rlen; i++, ix1+=src.clen, ix2+=clen )
System.arraycopy(src.denseBlock, ix1, denseBlock, ix2, rowLen);
}
private void copyEmptyToSparse(int rl, int ru, int cl, int cu, boolean updateNNZ )
{
if( cl==cu ) //specific case: column vector
{
if( updateNNZ )
{
for( int i=rl; i<=ru; i++ )
if( sparseRows[i] != null && !sparseRows[i].isEmpty() )
{
int lnnz = sparseRows[i].size();
sparseRows[i].deleteIndex(cl);
nonZeros += (sparseRows[i].size()-lnnz);
}
}
else
{
for( int i=rl; i<=ru; i++ )
if( sparseRows[i] != null && !sparseRows[i].isEmpty() )
sparseRows[i].deleteIndex(cl);
}
}
else
{
if( updateNNZ )
{
for( int i=rl; i<=ru; i++ )
if( sparseRows[i] != null && !sparseRows[i].isEmpty() )
{
int lnnz = sparseRows[i].size();
sparseRows[i].deleteIndexRange(cl, cu);
nonZeros += (sparseRows[i].size()-lnnz);
}
}
else
{
for( int i=rl; i<=ru; i++ )
if( sparseRows[i] != null && !sparseRows[i].isEmpty() )
sparseRows[i].deleteIndexRange(cl, cu);
}
}
}
/**
 * Resets the dense index range [rl,ru] x [cl,cu] (inclusive) to zero.
 * Assumes the dense block is allocated; does not maintain nonZeros.
 */
private void copyEmptyToDense(int rl, int ru, int cl, int cu)
{
	int rowLen = cu-cl+1;

	if( clen == rowLen ) {
		//contiguous case: the range spans full rows, clear in one shot
		Arrays.fill(denseBlock, rl*clen+cl, ru*clen+cu+1, 0);
	}
	else {
		//general case: clear the column range row by row
		int rowStart = rl*clen+cl;
		for( int i=rl; i<=ru; i++, rowStart+=clen )
			Arrays.fill(denseBlock, rowStart, rowStart+rowLen, 0);
	}
}
/**
* Merge disjoint: merges all non-zero values of the given input into the current
* matrix block. Note that this method does NOT check for overlapping entries;
* it's the callers reponsibility of ensuring disjoint matrix blocks.
*
* The appendOnly parameter is only relevant for sparse target blocks; if true,
* we only append values and do not sort sparse rows for each call; this is useful
* whenever we merge iterators of matrix blocks into one target block.
*
* @param that
* @param appendOnly
* @throws DMLRuntimeException
*/
public void merge(MatrixBlock that, boolean appendOnly)
throws DMLRuntimeException
{
//check for empty input source (nothing to merge)
if( that == null || that.isEmptyBlock(false) )
return;
//check dimensions (before potentially copy to prevent implicit dimension change)
//this also does a best effort check for disjoint input blocks via the number of non-zeros
if( rlen != that.rlen || clen != that.clen )
throw new DMLRuntimeException("Dimension mismatch on merge disjoint (target="+rlen+"x"+clen+", source="+that.rlen+"x"+that.clen+")");
if( (long)this.nonZeros+ that.nonZeros > (long)rlen*clen )
throw new DMLRuntimeException("Number of non-zeros mismatch on merge disjoint (target="+rlen+"x"+clen+", nnz target="+nonZeros+", nnz source="+that.nonZeros+")");
//check for empty target (copy in full)
if( this.isEmptyBlock(false) ) {
this.copy(that);
return;
}
//core matrix block merge (guaranteed non-empty source/target, nnz maintenance not required)
long nnz = this.nonZeros + that.nonZeros;
if( sparse )
this.mergeIntoSparse(that, appendOnly);
else
this.mergeIntoDense(that);
//maintain number of nonzeros
this.nonZeros = nnz;
}
/**
*
* @param that
*/
private void mergeIntoDense(MatrixBlock that)
{
if( that.sparse ) //DENSE <- SPARSE
{
SparseRow[] b = that.sparseRows;
for( int i=0; i<rlen; i++ )
if( b[i] != null && !b[i].isEmpty() )
{
SparseRow brow = b[i];
int blen = brow.size();
int[] bix = brow.getIndexContainer();
double[] bval = brow.getValueContainer();
for( int j=0; j<blen; j++ )
if( bval[j] != 0 )
this.quickSetValue(i, bix[j], bval[j]);
}
}
else //DENSE <- DENSE
{
double[] a = this.denseBlock;
double[] b = that.denseBlock;
int len = rlen * clen;
for( int i=0; i<len; i++ )
a[i] = ( b[i] != 0 ) ? b[i] : a[i];
}
}
/**
*
* @param that
* @param appendOnly
*/
private void mergeIntoSparse(MatrixBlock that, boolean appendOnly)
{
if( that.sparse ) //SPARSE <- SPARSE
{
SparseRow[] a = this.sparseRows;
SparseRow[] b = that.sparseRows;
for( int i=0; i<rlen; i++ )
{
if( b[i] != null && !b[i].isEmpty() )
{
if( a[i] == null || a[i].isEmpty() ) {
//copy entire sparse row (no sort required)
a[i] = new SparseRow(b[i]);
}
else
{
boolean appended = false;
SparseRow brow = b[i];
int blen = brow.size();
int[] bix = brow.getIndexContainer();
double[] bval = brow.getValueContainer();
for( int j=0; j<blen; j++ ) {
if( bval[j] != 0 ) {
this.appendValue(i, bix[j], bval[j]);
appended = true;
}
}
//only sort if value appended
if( !appendOnly && appended )
this.sparseRows[i].sort();
}
}
}
}
else //SPARSE <- DENSE
{
double[] b = that.denseBlock;
for( int i=0, bix=0; i<rlen; i++, bix+=clen )
{
boolean appended = false;
for( int j=0; j<clen; j++ ) {
if( b[bix+j] != 0 ) {
this.appendValue(i, j, b[bix+j]);
appended = true;
}
}
//only sort if value appended
if( !appendOnly && appended )
this.sparseRows[i].sort();
}
}
}
////////
// Input/Output functions
@Override
public void readFields(DataInput in)
throws IOException
{
//read basic header (int rlen, int clen, byte type)
rlen = in.readInt();
clen = in.readInt();
byte bformat = in.readByte();
//check type information
if( bformat<0 || bformat>=BlockType.values().length )
throw new IOException("invalid format: '"+bformat+"' (need to be 0-"+BlockType.values().length+").");
BlockType format=BlockType.values()[bformat];
try
{
switch(format)
{
case ULTRA_SPARSE_BLOCK:
nonZeros = readNnzInfo( in, true );
sparse = evalSparseFormatInMemory(rlen, clen, nonZeros);
cleanupBlock(true, true); //clean all
if( sparse )
readUltraSparseBlock(in);
else
readUltraSparseToDense(in);
break;
case SPARSE_BLOCK:
nonZeros = readNnzInfo( in, false );
sparse = evalSparseFormatInMemory(rlen, clen, nonZeros);
cleanupBlock(sparse, !sparse);
if( sparse )
readSparseBlock(in);
else
readSparseToDense(in);
break;
case DENSE_BLOCK:
sparse = false;
cleanupBlock(false, true); //reuse dense
readDenseBlock(in); //always dense in-mem if dense on disk
break;
case EMPTY_BLOCK:
sparse = true;
cleanupBlock(true, true); //clean all
nonZeros = 0;
break;
}
}
catch(DMLRuntimeException ex)
{
throw new IOException("Error reading block of type '"+format.toString()+"'.", ex);
}
}
/**
*
* @param in
* @throws IOException
* @throws DMLRuntimeException
*/
private void readDenseBlock(DataInput in)
throws IOException, DMLRuntimeException
{
allocateDenseBlock(true); //allocate block, clear nnz
int limit = rlen*clen;
if( in instanceof MatrixBlockDataInput ) //fast deserialize
{
MatrixBlockDataInput mbin = (MatrixBlockDataInput)in;
nonZeros = mbin.readDoubleArray(limit, denseBlock);
}
else if( in instanceof DataInputBuffer && MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
{
//workaround because sequencefile.reader.next(key, value) does not yet support serialization framework
DataInputBuffer din = (DataInputBuffer)in;
MatrixBlockDataInput mbin = new FastBufferedDataInputStream(din);
nonZeros = mbin.readDoubleArray(limit, denseBlock);
((FastBufferedDataInputStream)mbin).close();
}
else //default deserialize
{
for( int i=0; i<limit; i++ )
{
denseBlock[i]=in.readDouble();
if(denseBlock[i]!=0)
nonZeros++;
}
}
}
/**
*
* @param in
* @throws IOException
*/
private void readSparseBlock(DataInput in)
throws IOException
{
allocateSparseRowsBlock();
resetSparse(); //reset all sparse rows
if( in instanceof MatrixBlockDataInput ) //fast deserialize
{
MatrixBlockDataInput mbin = (MatrixBlockDataInput)in;
nonZeros = mbin.readSparseRows(rlen, sparseRows);
}
else if( in instanceof DataInputBuffer && MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
{
//workaround because sequencefile.reader.next(key, value) does not yet support serialization framework
DataInputBuffer din = (DataInputBuffer)in;
MatrixBlockDataInput mbin = new FastBufferedDataInputStream(din);
nonZeros = mbin.readSparseRows(rlen, sparseRows);
((FastBufferedDataInputStream)mbin).close();
}
else //default deserialize
{
for(int r=0; r<rlen; r++)
{
int nr=in.readInt();
if(nr==0)
{
if(sparseRows[r]!=null)
sparseRows[r].reset(estimatedNNzsPerRow, clen);
continue;
}
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow(nr);
else
sparseRows[r].reset(nr, clen);
for(int j=0; j<nr; j++)
sparseRows[r].append(in.readInt(), in.readDouble());
}
}
}
/**
*
* @param in
* @throws IOException
* @throws DMLRuntimeException
*/
private void readSparseToDense(DataInput in)
throws IOException, DMLRuntimeException
{
allocateDenseBlock(false); //allocate block
Arrays.fill(denseBlock, 0);
for(int r=0; r<rlen; r++)
{
int nr = in.readInt();
for( int j=0; j<nr; j++ )
{
int c = in.readInt();
double val = in.readDouble();
denseBlock[r*clen+c] = val;
}
}
}
/**
*
* @param in
* @throws IOException
*/
private void readUltraSparseBlock(DataInput in)
throws IOException
{
allocateSparseRowsBlock(false); //adjust to size
resetSparse(); //reset all sparse rows
for(long i=0; i<nonZeros; i++)
{
int r = in.readInt();
int c = in.readInt();
double val = in.readDouble();
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow(1,clen);
sparseRows[r].append(c, val);
}
}
/**
*
* @param in
* @throws IOException
* @throws DMLRuntimeException
*/
private void readUltraSparseToDense(DataInput in)
throws IOException, DMLRuntimeException
{
allocateDenseBlock(false); //allocate block
Arrays.fill(denseBlock, 0);
for(long i=0; i<nonZeros; i++)
{
int r = in.readInt();
int c = in.readInt();
double val = in.readDouble();
denseBlock[r*clen+c] = val;
}
}
@Override
public void write(DataOutput out)
throws IOException
{
//determine format
boolean sparseSrc = sparse;
boolean sparseDst = evalSparseFormatOnDisk();
//write first part of header
out.writeInt(rlen);
out.writeInt(clen);
if( sparseSrc )
{
//write sparse to *
if( sparseRows==null || nonZeros==0 )
writeEmptyBlock(out);
else if( nonZeros<rlen && sparseDst )
writeSparseToUltraSparse(out);
else if( sparseDst )
writeSparseBlock(out);
else
writeSparseToDense(out);
}
else
{
//write dense to *
if( denseBlock==null || nonZeros==0 )
writeEmptyBlock(out);
else if( nonZeros<rlen && sparseDst )
writeDenseToUltraSparse(out);
else if( sparseDst )
writeDenseToSparse(out);
else
writeDenseBlock(out);
}
}
/**
 * Serializes an empty block: only the block-type marker is written.
 *
 * @param out output destination
 * @throws IOException if writing to the output fails
 */
private void writeEmptyBlock(DataOutput out)
	throws IOException
{
	//empty blocks do not need to materialize row information
	out.writeByte( BlockType.EMPTY_BLOCK.ordinal() );
}
/**
*
* @param out
* @throws IOException
*/
private void writeDenseBlock(DataOutput out)
throws IOException
{
out.writeByte( BlockType.DENSE_BLOCK.ordinal() );
int limit=rlen*clen;
if( out instanceof MatrixBlockDataOutput ) //fast serialize
((MatrixBlockDataOutput)out).writeDoubleArray(limit, denseBlock);
else //general case (if fast serialize not supported)
for(int i=0; i<limit; i++)
out.writeDouble(denseBlock[i]);
}
/**
 * Writes the sparse block: block-type byte, nnz info, then per row the
 * number of non-zeros followed by (column, value) pairs. Rows beyond the
 * allocated sparseRows array are written as empty (count 0).
 *
 * @param out data output to write to
 * @throws IOException if writing to the output fails
 */
private void writeSparseBlock(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.SPARSE_BLOCK.ordinal() );
    writeNnzInfo( out, false );

    if( out instanceof MatrixBlockDataOutput ) //fast serialize
        ((MatrixBlockDataOutput)out).writeSparseRows(rlen, sparseRows);
    else //general case (if fast serialize not supported)
    {
        int r=0;
        for(;r<Math.min(rlen, sparseRows.length); r++)
        {
            if(sparseRows[r]==null)
                out.writeInt(0);
            else
            {
                int nr=sparseRows[r].size();
                out.writeInt(nr);
                int[] cols=sparseRows[r].getIndexContainer();
                double[] values=sparseRows[r].getValueContainer();
                for(int j=0; j<nr; j++)
                {
                    out.writeInt(cols[j]);
                    out.writeDouble(values[j]);
                }
            }
        }
        //remaining unallocated rows are empty
        for(;r<rlen; r++)
            out.writeInt(0);
    }
}
/**
 * Writes the sparse block in ultra-sparse format: block-type byte, nnz info,
 * then one (row, column, value) triple per non-zero. Verifies afterwards
 * that the number of written triples matches the maintained nnz count.
 *
 * @param out data output to write to
 * @throws IOException if writing fails or nnz is inconsistent
 */
private void writeSparseToUltraSparse(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.ULTRA_SPARSE_BLOCK.ordinal() );
    writeNnzInfo( out, true );

    long wnnz = 0;
    for(int r=0;r<Math.min(rlen, sparseRows.length); r++)
        if(sparseRows[r]!=null && !sparseRows[r].isEmpty() )
        {
            int alen = sparseRows[r].size();
            int[] aix = sparseRows[r].getIndexContainer();
            double[] avals = sparseRows[r].getValueContainer();
            for(int j=0; j<alen; j++) {
                out.writeInt(r);
                out.writeInt(aix[j]);
                out.writeDouble(avals[j]);
                wnnz++;
            }
        }

    //validity check (nnz must exactly match written nnz)
    if( nonZeros != wnnz )
        throw new IOException("Invalid number of serialized non-zeros: "+wnnz+" (expected: "+nonZeros+")");
}
/**
 * Writes the sparse block in dense on-disk format: block-type byte followed
 * by all rlen*clen cell values in row-major order, interleaving the sparse
 * row's non-zeros with explicit zeros for the gaps between column indexes.
 *
 * @param out data output to write to
 * @throws IOException if writing to the output fails
 */
private void writeSparseToDense(DataOutput out)
    throws IOException
{
    //write block type 'dense'
    out.writeByte( BlockType.DENSE_BLOCK.ordinal() );

    //write data (from sparse to dense)
    if( sparseRows==null ) //empty block
        for( int i=0; i<rlen*clen; i++ )
            out.writeDouble(0);
    else //existing sparse block
    {
        for( int i=0; i<rlen; i++ )
        {
            if( i<sparseRows.length && sparseRows[i]!=null && !sparseRows[i].isEmpty() )
            {
                SparseRow arow = sparseRows[i];
                int alen = arow.size();
                int[] aix = arow.getIndexContainer();
                double[] avals = arow.getValueContainer();
                //foreach non-zero value, fill with 0s if required
                //(j tracks the current output column, j2 the non-zero index)
                for( int j=0, j2=0; j2<alen; j++, j2++ ) {
                    for( ; j<aix[j2]; j++ )
                        out.writeDouble( 0 );
                    out.writeDouble( avals[j2] );
                }
                //remaining 0 values in row
                for( int j=aix[alen-1]+1; j<clen; j++)
                    out.writeDouble( 0 );
            }
            else //empty row
                for( int j=0; j<clen; j++ )
                    out.writeDouble( 0 );
        }
    }
}
/**
 * Writes the dense block in ultra-sparse format: block-type byte, nnz info,
 * then one (row, column, value) triple per non-zero cell. Verifies that
 * the number of written triples matches the maintained nnz count.
 *
 * @param out data output to write to
 * @throws IOException if writing fails or nnz is inconsistent
 */
private void writeDenseToUltraSparse(DataOutput out) throws IOException
{
    out.writeByte( BlockType.ULTRA_SPARSE_BLOCK.ordinal() );
    writeNnzInfo( out, true );

    long wnnz = 0;
    for(int r=0, ix=0; r<rlen; r++)
        for(int c=0; c<clen; c++, ix++)
            if( denseBlock[ix]!=0 )
            {
                out.writeInt(r);
                out.writeInt(c);
                out.writeDouble(denseBlock[ix]);
                wnnz++;
            }

    //validity check (nnz must exactly match written nnz)
    if( nonZeros != wnnz )
        throw new IOException("Invalid number of serialized non-zeros: "+wnnz+" (expected: "+nonZeros+")");
}
/**
 * Writes the dense block in sparse on-disk format: block-type byte, nnz
 * info, then per row the count of non-zeros followed by (column, value)
 * pairs. Each row is scanned twice (once to count, once to emit).
 *
 * @param out data output to write to
 * @throws IOException if writing to the output fails
 */
private void writeDenseToSparse(DataOutput out)
    throws IOException
{
    out.writeByte( BlockType.SPARSE_BLOCK.ordinal() ); //block type
    writeNnzInfo( out, false );

    int start=0;
    for(int r=0; r<rlen; r++)
    {
        //count nonzeros
        int nr=0;
        for(int i=start; i<start+clen; i++)
            if(denseBlock[i]!=0.0)
                nr++;
        out.writeInt(nr);
        //emit (column, value) pairs; 'start' walks the row-major array
        for(int c=0; c<clen; c++)
        {
            if(denseBlock[start]!=0.0)
            {
                out.writeInt(c);
                out.writeDouble(denseBlock[start]);
            }
            start++;
        }
    }
}
/**
 * Reads the number of non-zeros from the input, as an int or long depending
 * on block size, and stores it in {@code nonZeros}.
 *
 * @param in data input to read from
 * @param ultrasparse true if reading an ultra-sparse block header (nnz fits
 *        in an int because nnz &lt; rlen and rlen is an int)
 * @return the number of non-zeros read
 * @throws IOException if reading from the input fails
 */
private long readNnzInfo( DataInput in, boolean ultrasparse )
    throws IOException
{
    //note: if ultrasparse, int always sufficient because nnz<rlen
    //where rlen is limited to integer

    long lrlen = (long)rlen;
    long lclen = (long)clen;

    //read long if required, otherwise int (see writeNnzInfo, consistency required)
    if( lrlen*lclen > Integer.MAX_VALUE  && !ultrasparse) {
        nonZeros = in.readLong();
    }
    else {
        nonZeros = in.readInt();
    }

    return nonZeros;
}

/**
 * Writes the number of non-zeros to the output, as an int or long depending
 * on block size. Must be kept consistent with readNnzInfo.
 *
 * @param out data output to write to
 * @param ultrasparse true if writing an ultra-sparse block header
 * @throws IOException if writing to the output fails
 */
private void writeNnzInfo( DataOutput out, boolean ultrasparse )
    throws IOException
{
    //note: if ultrasparse, int always sufficient because nnz<rlen
    //where rlen is limited to integer

    long lrlen = (long)rlen;
    long lclen = (long)clen;

    //write long if required, otherwise int
    if( lrlen*lclen > Integer.MAX_VALUE && !ultrasparse) {
        out.writeLong( nonZeros );
    }
    else {
        out.writeInt( (int)nonZeros );
    }
}
/**
 * Computes the exact serialized size of this block in bytes, mirroring the
 * format dispatch in write(): empty, ultra-sparse, sparse, or dense.
 * NOTE: The used estimates must be kept consistent with the respective write functions.
 *
 * @return exact on-disk size of this block in bytes
 */
public long getExactSizeOnDisk()
{
    //determine format
    boolean sparseSrc = sparse;
    boolean sparseDst = evalSparseFormatOnDisk();

    long lrlen = (long) rlen;
    long lclen = (long) clen;
    long lnonZeros = (long) nonZeros;

    //ensure exact size estimates for write
    if( lnonZeros <= 0 )
    {
        recomputeNonZeros();
        lnonZeros = (long) nonZeros;
    }

    //get exact size estimate (see write for the corresponding meaning)
    if( sparseSrc )
    {
        //write sparse to *
        if(sparseRows==null || lnonZeros==0)
            return HEADER_SIZE; //empty block
        else if( lnonZeros<lrlen && sparseDst )
            return estimateSizeUltraSparseOnDisk(lrlen, lclen, lnonZeros); //ultra sparse block
        else if( sparseDst )
            return estimateSizeSparseOnDisk(lrlen, lclen, lnonZeros); //sparse block
        else
            return estimateSizeDenseOnDisk(lrlen, lclen); //dense block
    }
    else
    {
        //write dense to *
        if(denseBlock==null || lnonZeros==0)
            return HEADER_SIZE; //empty block
        else if( lnonZeros<lrlen && sparseDst )
            return estimateSizeUltraSparseOnDisk(lrlen, lclen, lnonZeros); //ultra sparse block
        else if( sparseDst )
            return estimateSizeSparseOnDisk(lrlen, lclen, lnonZeros); //sparse block
        else
            return estimateSizeDenseOnDisk(lrlen, lclen); //dense block
    }
}
////
// Estimates size and sparsity

/**
 * Estimates the in-memory size of a matrix block with the given dimensions
 * and sparsity, using whichever representation (sparse/dense) would be
 * chosen in memory for those characteristics.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param sparsity fraction of non-zero cells in [0,1]
 * @return estimated memory size in bytes
 */
public static long estimateSizeInMemory(long nrows, long ncols, double sparsity)
{
    //determine sparse/dense representation
    boolean sparse = evalSparseFormatInMemory(nrows, ncols, (long)(sparsity*nrows*ncols));

    //estimate memory consumption for sparse/dense
    if( sparse )
        return estimateSizeSparseInMemory(nrows, ncols, sparsity);
    else
        return estimateSizeDenseInMemory(nrows, ncols);
}
/**
 * Estimates the in-memory size of a dense matrix block: a fixed object
 * overhead plus one 8-byte double per cell.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @return estimated memory size in bytes
 */
private static long estimateSizeDenseInMemory(long nrows, long ncols)
{
    // 44B for basic variables and references,
    // plus the core dense matrix block (double array, 8B per cell)
    final long overhead = 44;
    return overhead + 8 * nrows * ncols;
}
/**
 * Estimates the in-memory size of a sparse matrix block, accounting for
 * per-row object overheads, per-non-zero (index,value) pairs, and empty
 * row references.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param sparsity fraction of non-zero cells in [0,1]
 * @return estimated memory size in bytes
 */
private static long estimateSizeSparseInMemory(long nrows, long ncols, double sparsity)
{
    // basic variables and references sizes
    long size = 44;

    //NOTES:
    // * Each sparse row has a fixed overhead of 8B (reference) + 32B (object) +
    //   12B (3 int members), 32B (overhead int array), 32B (overhead double array),
    // * Each non-zero value requires 12B for the column-index/value pair.
    // * Overheads for arrays, objects, and references refer to 64bit JVMs
    // * If nnz < than rows we have only also empty rows.

    //account for sparsity and initial capacity
    long cnnz = Math.max(SparseRow.initialCapacity, (long)Math.ceil(sparsity*ncols));
    long rlen = Math.min(nrows, (long) Math.ceil(sparsity*nrows*ncols));
    size += rlen * ( 116 + 12 * cnnz ); //sparse row
    size += nrows * 8; //empty rows

    //OLD ESTIMATE:
    //int len = Math.max(SparseRow.initialCapacity, (int)Math.ceil(sparsity*ncols));
    //size += nrows * (28 + 12 * len );

    return size;
}
/**
 * Estimates the on-disk size of a matrix block with the given dimensions
 * and number of non-zeros, using whichever serialization format
 * (ultra-sparse/sparse/dense) would be chosen for those characteristics.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return estimated on-disk size in bytes
 */
public static long estimateSizeOnDisk( long nrows, long ncols, long nnz )
{
    //determine sparse/dense representation
    boolean sparse = evalSparseFormatOnDisk(nrows, ncols, nnz);

    //estimate memory consumption for sparse/dense
    if( sparse && nnz<nrows )
        return estimateSizeUltraSparseOnDisk(nrows, ncols, nnz);
    else if( sparse )
        return estimateSizeSparseOnDisk(nrows, ncols, nnz);
    else
        return estimateSizeDenseOnDisk(nrows, ncols);
}
/**
 * Estimates the on-disk size of a dense block: header plus one 8-byte
 * double per cell. Must be kept consistent with writeDenseBlock.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @return estimated on-disk size in bytes
 */
private static long estimateSizeDenseOnDisk( long nrows, long ncols)
{
    //basic header (int rlen, int clen, byte type)
    long size = HEADER_SIZE;
    //data (all cells double)
    size += nrows * ncols * 8;

    return size;
}

/**
 * Estimates the on-disk size of a sparse block: header, nnz field (int or
 * long), per-row counts, and an (int,double) pair per non-zero. Must be
 * kept consistent with writeSparseBlock/writeNnzInfo.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return estimated on-disk size in bytes
 */
private static long estimateSizeSparseOnDisk( long nrows, long ncols, long nnz )
{
    //basic header: (int rlen, int clen, byte type)
    long size = HEADER_SIZE;
    //extended header (long nnz)
    size += (nrows*ncols > Integer.MAX_VALUE) ? 8 : 4;
    //data: (int num per row, int-double pair per non-zero value)
    size += nrows * 4 + nnz * 12;

    return size;
}

/**
 * Estimates the on-disk size of an ultra-sparse block: header, int nnz
 * field, and an (int,int,double) triple per non-zero. Must be kept
 * consistent with writeSparseToUltraSparse/writeDenseToUltraSparse.
 *
 * @param nrows number of rows
 * @param ncols number of columns
 * @param nnz number of non-zero values
 * @return estimated on-disk size in bytes
 */
private static long estimateSizeUltraSparseOnDisk( long nrows, long ncols, long nnz )
{
    //basic header (int rlen, int clen, byte type)
    long size = HEADER_SIZE;
    //extended header (int nnz, guaranteed by rlen<nnz)
    size += 4;
    //data (int-int-double triples per non-zero value)
    size += nnz * 16;

    return size;
}
/**
 * Estimates the output sparsity of an aggregate binary operation (matrix
 * multiply) of m1 (n x k) and m2 (k x m), whose output is n x m.
 *
 * NOTE: since MatrixMultLib always uses a dense intermediate output with a
 * subsequent check for sparsity, this always returns a dense estimate.
 * Once more aggregate binary operations are supported, this needs to change.
 *
 * @param m1 left-hand side input matrix
 * @param m2 right-hand side input matrix
 * @param op aggregate binary operator (currently unused, see note above)
 * @return dense sparsity estimate with estimated nnz = nrow(m1) * ncol(m2)
 */
public static SparsityEstimate estimateSparsityOnAggBinary(MatrixBlock m1, MatrixBlock m2, AggregateBinaryOperator op)
{
    //NOTE: since MatrixMultLib always uses a dense intermediate output
    //with subsequent check for sparsity, we should always return a dense estimate.
    //Once, we support more aggregate binary operations, we need to change this.

    //FIX: the output of an aggregate binary (matrix multiply) has dimensions
    //nrow(m1) x ncol(m2); the previous code used m2.getNumRows() which gave a
    //wrong dense cell count whenever m2 is non-square.
    return new SparsityEstimate(false, m1.getNumRows()*m2.getNumColumns());
}
/**
 * Estimates the output sparsity of a binary cell-wise operation on m1 and
 * m2, taking the binary access type (matrix-matrix, matrix-vector, outer
 * vector-vector) and the operator's sparse-safety into account.
 *
 * @param m1 left-hand side input matrix
 * @param m2 right-hand side input matrix (possibly a row/column vector)
 * @param op binary operator
 * @return sparsity estimate (representation flag and estimated nnz)
 */
private static SparsityEstimate estimateSparsityOnBinary(MatrixBlock m1, MatrixBlock m2, BinaryOperator op)
{
    SparsityEstimate est=new SparsityEstimate();

    //if result is a column vector, use dense format, otherwise use the normal process to decide
    if(!op.sparseSafe ) {
        est.sparse = false;
        return est;
    }

    BinaryAccessType atype = LibMatrixBincell.getBinaryAccessType(m1, m2);
    boolean outer = (atype == BinaryAccessType.OUTER_VECTOR_VECTOR);
    long m = m1.getNumRows();
    long n = outer ? m2.getNumColumns() : m1.getNumColumns();
    long nz1 = m1.getNonZeros();
    long nz2 = m2.getNonZeros();

    //account for matrix vector and vector/vector
    long estnnz = 0;
    if( atype == BinaryAccessType.OUTER_VECTOR_VECTOR )
    {
        //for outer vector operations the sparsity estimate is exactly known
        estnnz = nz1 * nz2;
    }
    else //DEFAULT CASE
    {
        //scale vector nnz to full matrix size (vector applied per row/column)
        if( atype == BinaryAccessType.MATRIX_COL_VECTOR )
            nz2 = nz2 * n;
        else if( atype == BinaryAccessType.MATRIX_ROW_VECTOR )
            nz2 = nz2 * m;

        //and/mult zero out where either side is zero; others may keep both sides
        if(op.fn instanceof And || op.fn instanceof Multiply)
            estnnz = Math.min(nz1, nz2); //worstcase wrt overlap
        else
            estnnz = nz1+nz2; //worstcase wrt operation
    }

    est.sparse = evalSparseFormatInMemory(m, n, estnnz);
    est.estimatedNonZeros = estnnz;

    return est;
}
/**
 * Estimates whether the result of a slice (right indexing) should be sparse,
 * scaling this block's nnz proportionally to the selected area.
 *
 * @param selectRlen number of selected rows
 * @param selectClen number of selected columns
 * @param finalRlen number of rows of the result
 * @param finalClen number of columns of the result
 * @return true if the result should use the sparse representation
 */
private boolean estimateSparsityOnSlice(int selectRlen, int selectClen, int finalRlen, int finalClen)
{
    //proportional nnz estimate based on this block's density
    long ennz = (long)((double)nonZeros/rlen/clen*selectRlen*selectClen);
    return evalSparseFormatInMemory(finalRlen, finalClen, ennz);
}

/**
 * Estimates whether the result of a left-indexing operation should be
 * sparse, using the worst-case nnz upper bound nnz(lhs)+nnz(rhs).
 *
 * @param rlenm1 rows of the left-hand side (target) matrix
 * @param clenm1 columns of the left-hand side matrix
 * @param nnzm1 nnz of the left-hand side matrix
 * @param rlenm2 rows of the right-hand side (inserted) matrix
 * @param clenm2 columns of the right-hand side matrix
 * @param nnzm2 nnz of the right-hand side matrix
 * @return true if the result should use the sparse representation
 */
private boolean estimateSparsityOnLeftIndexing(long rlenm1, long clenm1, long nnzm1, long rlenm2, long clenm2, long nnzm2)
{
    //min bound: nnzm1 - rlenm2*clenm2 + nnzm2
    //max bound: min(rlenm1*rlenm2, nnzm1+nnzm2)

    long ennz = Math.min(rlenm1*clenm1, nnzm1+nnzm2);
    return evalSparseFormatInMemory(rlenm1, clenm1, ennz);
}

/**
 * Estimates whether the result of a grouped aggregate (a groups-by-1
 * column vector) should be sparse.
 *
 * @param rlen number of input rows
 * @param groups number of groups (result rows)
 * @return true if the result should use the sparse representation
 */
private boolean estimateSparsityOnGroupedAgg( long rlen, long groups )
{
    long ennz = Math.min(groups, rlen);
    return evalSparseFormatInMemory(groups, 1, ennz);
}
////
// Core block operations (called from instructions)

/**
 * Applies a scalar operation (e.g., X+7, X*2) to this block and returns
 * the result.
 *
 * @param op scalar operator to apply
 * @param result output matrix block to reuse, or null to allocate a new one
 * @return result block (same dimensions as this block)
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails
 */
public MatrixValue scalarOperations(ScalarOperator op, MatrixValue result)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock ret = checkType(result);

    // estimate the sparsity structure of result matrix
    boolean sp = this.sparse; // by default, we guess result.sparsity=input.sparsity
    if (!op.sparseSafe)
        sp = false; // if the operation is not sparse safe, then result will be in dense format

    //allocate the output matrix block
    if( ret==null )
        ret = new MatrixBlock(rlen, clen, sp, this.nonZeros);
    else
        ret.reset(rlen, clen, sp, this.nonZeros);

    //core scalar operations
    LibMatrixBincell.bincellOp(this, ret, op);

    return ret;
}
/**
 * Applies a unary operation (e.g., abs, round, cumsum) to this block and
 * returns the result.
 *
 * @param op unary operator to apply
 * @param result output matrix block to reuse, or null to allocate a new one
 * @return result block (same dimensions as this block)
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails
 */
public MatrixValue unaryOperations(UnaryOperator op, MatrixValue result)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    checkType(result);

    // estimate the sparsity structure of result matrix
    boolean sp = this.sparse; // by default, we guess result.sparsity=input.sparsity
    if (!op.sparseSafe)
        sp = false; // if the operation is not sparse safe, then result will be in dense format

    //allocate output
    if(result==null)
        result=new MatrixBlock(rlen, clen, sp, this.nonZeros);
    else
        result.reset(rlen, clen, sp);

    //core execute
    if( LibMatrixAgg.isSupportedUnaryOperator(op) ) //e.g., cumsum
    {
        //fast path for special unary aggregates
        LibMatrixAgg.aggregateUnaryMatrix(this, (MatrixBlock)result, op);
    }
    else
    {
        //generic path: copy input and apply the operator in place
        result.copy(this);
        ((MatrixBlock)result).unaryOperationsInPlace(op);
    }

    return result;
}
/**
 * Applies a unary operation to this block in place, dispatching on the
 * operator's sparse-safety: sparse-safe operators only need to touch
 * non-zero cells, while sparse-unsafe operators must touch every cell.
 *
 * @param op unary operator to apply
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails
 */
public void unaryOperationsInPlace(UnaryOperator op)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if( !op.sparseSafe )
        denseUnaryOperationsInPlace(op);
    else
        sparseUnaryOperationsInPlace(op);
}
/**
 * only apply to non zero cells
 *
 * Applies a sparse-safe unary operation in place: since op(0)=0, only
 * non-zero cells need to be evaluated. Compacts sparse rows by dropping
 * values that became zero and recomputes nonZeros on the fly.
 *
 * @param op sparse-safe unary operator
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails
 */
private void sparseUnaryOperationsInPlace(UnaryOperator op)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    //early abort possible since sparse-safe
    if( isEmptyBlock(false) )
        return;

    if(sparse)
    {
        nonZeros=0;
        for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
        {
            if(sparseRows[r]==null)
                continue;
            double[] values=sparseRows[r].getValueContainer();
            int[] cols=sparseRows[r].getIndexContainer();
            //in-place compaction: pos tracks the write position for surviving non-zeros
            int pos=0;
            for(int i=0; i<sparseRows[r].size(); i++)
            {
                double v=op.fn.execute(values[i]);
                if(v!=0)
                {
                    values[pos]=v;
                    cols[pos]=cols[i];
                    pos++;
                    nonZeros++;
                }
            }
            sparseRows[r].truncate(pos);
        }

    }
    else
    {
        //dense representation: apply to all cells and recount non-zeros
        int limit=rlen*clen;
        nonZeros=0;
        for(int i=0; i<limit; i++)
        {
            denseBlock[i]=op.fn.execute(denseBlock[i]);
            if(denseBlock[i]!=0)
                nonZeros++;
        }
    }
}
/**
 * Applies a sparse-unsafe unary operation in place: because op(0) may be
 * non-zero, every cell (including zeros) must be evaluated.
 *
 * @param op sparse-unsafe unary operator
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails
 */
private void denseUnaryOperationsInPlace(UnaryOperator op)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if( sparse ) //SPARSE MATRIX
    {
        //cell-wise get/set; may insert new non-zeros into the sparse rows
        double v;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                v=op.fn.execute(quickGetValue(r, c));
                quickSetValue(r, c, v);
            }
    }
    else//DENSE MATRIX
    {
        //early abort not possible because not sparsesafe
        if(denseBlock==null)
            allocateDenseBlock();

        int limit=rlen*clen;
        nonZeros=0;
        for(int i=0; i<limit; i++)
        {
            denseBlock[i]=op.fn.execute(denseBlock[i]);
            if(denseBlock[i]!=0)
                nonZeros++;
        }
    }
}
/**
 * Applies a binary cell-wise operation on this block and thatValue (matrix,
 * row/column vector, or outer vector-vector), returning the result.
 *
 * @param op binary operator to apply
 * @param thatValue right-hand side operand
 * @param result output matrix block to reuse, or null to allocate a new one
 * @return result block
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails or dimensions mismatch
 */
public MatrixValue binaryOperations(BinaryOperator op, MatrixValue thatValue, MatrixValue result)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock that = checkType(thatValue);
    MatrixBlock ret = checkType(result);
    if( !LibMatrixBincell.isValidDimensionsBinary(this, that) ) {
        throw new RuntimeException("block sizes are not matched for binary " +
                "cell operations: "+this.rlen+"x"+this.clen+" vs "+ that.rlen+"x"+that.clen);
    }

    //compute output dimensions (outer vector-vector expands to a matrix)
    boolean outer = (LibMatrixBincell.getBinaryAccessType(this, that)
                     == BinaryAccessType.OUTER_VECTOR_VECTOR);
    int rows = rlen;
    int cols = outer ? that.clen : clen;

    //estimate output sparsity
    SparsityEstimate resultSparse = estimateSparsityOnBinary(this, that, op);
    if( ret == null )
        ret = new MatrixBlock(rows, cols, resultSparse.sparse, resultSparse.estimatedNonZeros);
    else
        ret.reset(rows, cols, resultSparse.sparse, resultSparse.estimatedNonZeros);

    //core binary cell operation
    LibMatrixBincell.bincellOp( this, that, ret, op );

    return ret;
}
/**
 * Applies a binary cell-wise operation on this block and thatValue in
 * place, converting this block's representation beforehand if the result
 * sparsity estimate differs from the current format.
 *
 * @param op binary operator to apply
 * @param thatValue right-hand side operand
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails or dimensions mismatch
 */
public void binaryOperationsInPlace(BinaryOperator op, MatrixValue thatValue)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock that=checkType(thatValue);
    if( !LibMatrixBincell.isValidDimensionsBinary(this, that) ) {
        throw new RuntimeException("block sizes are not matched for binary " +
                "cell operations: "+this.rlen+"*"+this.clen+" vs "+ that.rlen+"*"+that.clen);
    }

    //estimate output sparsity and convert representation if required
    SparsityEstimate resultSparse = estimateSparsityOnBinary(this, that, op);
    if(resultSparse.sparse && !this.sparse)
        denseToSparse();
    else if(!resultSparse.sparse && this.sparse)
        sparseToDense();

    //core binary cell operation
    LibMatrixBincell.bincellOpInPlace(this, that, op);
}
/**
 * Incrementally aggregates newWithCorrection into this block, keeping the
 * aggregation state (Kahan correction terms, counts, or row-wise max/min
 * values) in the separate correction block, positioned according to the
 * operator's correction location.
 *
 * @param aggOp aggregate operator (function and correction location)
 * @param correction block holding correction/auxiliary values
 * @param newWithCorrection new partial aggregate including its correction
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
public void incrementalAggregate(AggregateOperator aggOp, MatrixValue correction,
        MatrixValue newWithCorrection)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    //assert(aggOp.correctionExists);
    MatrixBlock cor=checkType(correction);
    MatrixBlock newWithCor=checkType(newWithCorrection);
    KahanObject buffer=new KahanObject(0, 0);

    if(aggOp.correctionLocation==CorrectionLocationType.LASTROW)
    {
        //Kahan aggregation with per-column correction in cor's first row;
        //newWithCor carries the value in row r and its correction in row r+1
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.quickGetValue(r, c);
                buffer._correction=cor.quickGetValue(0, c);
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c),
                        newWithCor.quickGetValue(r+1, c));
                quickSetValue(r, c, buffer._sum);
                cor.quickSetValue(0, c, buffer._correction);
            }

    }else if(aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
    {
        if(aggOp.increOp.fn instanceof Builtin
           && ( ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MAXINDEX
                || ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MININDEX )
           ){
            // *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
            // rowIndexMax() and its siblings don't fit very well into the standard
            // aggregate framework. We (ab)use the "correction factor" argument to
            // hold the maximum value in each row/column.

            // The execute() method for this aggregate takes as its argument
            // two candidates for the highest value. Bookkeeping about
            // indexes (return column/row index with highest value, breaking
            // ties in favor of higher indexes) is handled in this function.
            // Note that both versions of incrementalAggregate() contain
            // very similar blocks of special-case code. If one block is
            // modified, the other needs to be changed to match.
            for(int r=0; r<rlen; r++){
                double currMaxValue = cor.quickGetValue(r, 0);
                long newMaxIndex = (long)newWithCor.quickGetValue(r, 0);
                double newMaxValue = newWithCor.quickGetValue(r, 1);
                double update = aggOp.increOp.fn.execute(newMaxValue, currMaxValue);

                if (2.0 == update) {
                    // Return value of 2 ==> both values the same, break ties
                    // in favor of higher index.
                    long curMaxIndex = (long) quickGetValue(r,0);
                    quickSetValue(r, 0, Math.max(curMaxIndex, newMaxIndex));
                } else if(1.0 == update){
                    // Return value of 1 ==> new value is better; use its index
                    quickSetValue(r, 0, newMaxIndex);
                    cor.quickSetValue(r, 0, newMaxValue);
                } else {
                    // Other return value ==> current answer is best
                }
            }
            // *** END HACK ***
        }else{
            //Kahan aggregation with per-row correction in cor's first column;
            //newWithCor carries the value in column c and its correction in c+1
            for(int r=0; r<rlen; r++)
                for(int c=0; c<clen; c++)
                {
                    buffer._sum=this.quickGetValue(r, c);
                    buffer._correction=cor.quickGetValue(r, 0);;
                    buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c), newWithCor.quickGetValue(r, c+1));
                    quickSetValue(r, c, buffer._sum);
                    cor.quickSetValue(r, 0, buffer._correction);
                }
        }
    }
    else if(aggOp.correctionLocation==CorrectionLocationType.NONE)
    {
        //e.g., ak+ kahan plus as used in sum, mapmult, mmcj and tsmm
        if(aggOp.increOp.fn instanceof KahanPlus)
        {
            LibMatrixAgg.aggregateBinaryMatrix(newWithCor, this, cor);
        }
        else
        {
            if( newWithCor.isInSparseFormat() && aggOp.sparseSafe ) //SPARSE
            {
                SparseRow[] bRows = newWithCor.getSparseRows();
                if( bRows==null ) //early abort on empty block
                    return;
                for( int r=0; r<Math.min(rlen, bRows.length); r++ )
                {
                    SparseRow brow = bRows[r];
                    if( brow != null && !brow.isEmpty() )
                    {
                        int blen = brow.size();
                        int[] bix = brow.getIndexContainer();
                        double[] bvals = brow.getValueContainer();
                        for( int j=0; j<blen; j++)
                        {
                            int c = bix[j];
                            buffer._sum = this.quickGetValue(r, c);
                            buffer._correction = cor.quickGetValue(r, c);
                            buffer = (KahanObject) aggOp.increOp.fn.execute(buffer, bvals[j]);
                            quickSetValue(r, c, buffer._sum);
                            cor.quickSetValue(r, c, buffer._correction);
                        }
                    }
                }
            }
            else //DENSE or SPARSE (!sparsesafe)
            {
                for(int r=0; r<rlen; r++)
                    for(int c=0; c<clen; c++)
                    {
                        buffer._sum=this.quickGetValue(r, c);
                        buffer._correction=cor.quickGetValue(r, c);
                        buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c));
                        quickSetValue(r, c, buffer._sum);
                        cor.quickSetValue(r, c, buffer._correction);
                    }
            }

            //change representation if required
            //(note since ak+ on blocks is currently only applied in MR, hence no need to account for this in mem estimates)
            examSparsity();
        }
    }
    else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOROWS)
    {
        //incremental mean: cor row 0 holds counts, row 1 holds corrections;
        //newWithCor carries mean in row r and count in row r+1
        double n, n2, mu2;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.quickGetValue(r, c);
                n=cor.quickGetValue(0, c);
                buffer._correction=cor.quickGetValue(1, c);
                mu2=newWithCor.quickGetValue(r, c);
                n2=newWithCor.quickGetValue(r+1, c);
                n=n+n2;
                double toadd=(mu2-buffer._sum)*n2/n;
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
                quickSetValue(r, c, buffer._sum);
                cor.quickSetValue(0, c, n);
                cor.quickSetValue(1, c, buffer._correction);
            }

    }else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS)
    {
        //incremental mean: cor column 0 holds counts, column 1 corrections;
        //newWithCor carries mean in column c and count in column c+1
        double n, n2, mu2;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.quickGetValue(r, c);
                n=cor.quickGetValue(r, 0);
                buffer._correction=cor.quickGetValue(r, 1);
                mu2=newWithCor.quickGetValue(r, c);
                n2=newWithCor.quickGetValue(r, c+1);
                n=n+n2;
                double toadd=(mu2-buffer._sum)*n2/n;
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
                quickSetValue(r, c, buffer._sum);
                cor.quickSetValue(r, 0, n);
                cor.quickSetValue(r, 1, buffer._correction);
            }
    }
    else
        throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}
/**
 * Incrementally aggregates newWithCorrection into this block, with the
 * correction values stored inside this block itself (in the last row(s) or
 * column(s), according to the operator's correction location) rather than
 * in a separate correction block.
 *
 * @param aggOp aggregate operator (function and correction location)
 * @param newWithCorrection new partial aggregate including its correction
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
public void incrementalAggregate(AggregateOperator aggOp, MatrixValue newWithCorrection)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    //assert(aggOp.correctionExists);
    MatrixBlock newWithCor=checkType(newWithCorrection);
    KahanObject buffer=new KahanObject(0, 0);

    if(aggOp.correctionLocation==CorrectionLocationType.LASTROW)
    {
        if( aggOp.increOp.fn instanceof KahanPlus )
        {
            //fast path for kahan plus
            LibMatrixAgg.aggregateBinaryMatrix(newWithCor, this, aggOp);
        }
        else
        {
            //correction kept in this block's last row (hence rlen-1 data rows)
            for(int r=0; r<rlen-1; r++)
                for(int c=0; c<clen; c++)
                {
                    buffer._sum=this.quickGetValue(r, c);
                    buffer._correction=this.quickGetValue(r+1, c);
                    buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c),
                            newWithCor.quickGetValue(r+1, c));
                    quickSetValue(r, c, buffer._sum);
                    quickSetValue(r+1, c, buffer._correction);
                }
        }
    }
    else if(aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
    {
        if(aggOp.increOp.fn instanceof Builtin
           && ( ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MAXINDEX
                || ((Builtin)(aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MININDEX)
           ){
            // *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
            // rowIndexMax() and its siblings don't fit very well into the standard
            // aggregate framework. We (ab)use the "correction factor" argument to
            // hold the maximum value in each row/column.

            // The execute() method for this aggregate takes as its argument
            // two candidates for the highest value. Bookkeeping about
            // indexes (return column/row index with highest value, breaking
            // ties in favor of higher indexes) is handled in this function.
            // Note that both versions of incrementalAggregate() contain
            // very similar blocks of special-case code. If one block is
            // modified, the other needs to be changed to match.
            for(int r = 0; r < rlen; r++){
                double currMaxValue = quickGetValue(r, 1);
                long newMaxIndex = (long)newWithCor.quickGetValue(r, 0);
                double newMaxValue = newWithCor.quickGetValue(r, 1);
                double update = aggOp.increOp.fn.execute(newMaxValue, currMaxValue);

                if (2.0 == update) {
                    // Return value of 2 ==> both values the same, break ties
                    // in favor of higher index.
                    long curMaxIndex = (long) quickGetValue(r,0);
                    quickSetValue(r, 0, Math.max(curMaxIndex, newMaxIndex));
                } else if(1.0 == update){
                    // Return value of 1 ==> new value is better; use its index
                    quickSetValue(r, 0, newMaxIndex);
                    quickSetValue(r, 1, newMaxValue);
                } else {
                    // Other return value ==> current answer is best
                }
            }
            // *** END HACK ***
        }
        else
        {
            if(aggOp.increOp.fn instanceof KahanPlus)
            {
                //fast path for kahan plus
                LibMatrixAgg.aggregateBinaryMatrix(newWithCor, this, aggOp);
            }
            else
            {
                //correction kept in this block's last column (hence clen-1 data columns)
                for(int r=0; r<rlen; r++)
                    for(int c=0; c<clen-1; c++)
                    {
                        buffer._sum=this.quickGetValue(r, c);
                        buffer._correction=this.quickGetValue(r, c+1);
                        buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.quickGetValue(r, c), newWithCor.quickGetValue(r, c+1));
                        quickSetValue(r, c, buffer._sum);
                        quickSetValue(r, c+1, buffer._correction);
                    }
            }
        }
    }/*else if(aggOp.correctionLocation==0)
    {
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                //buffer._sum=this.getValue(r, c);
                //buffer._correction=0;
                //buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c));
                setValue(r, c, this.getValue(r, c)+newWithCor.getValue(r, c));
            }
    }*/else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOROWS)
    {
        //incremental mean: counts in row r+1, corrections in row r+2
        double n, n2, mu2;
        for(int r=0; r<rlen-2; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.quickGetValue(r, c);
                n=this.quickGetValue(r+1, c);
                buffer._correction=this.quickGetValue(r+2, c);
                mu2=newWithCor.quickGetValue(r, c);
                n2=newWithCor.quickGetValue(r+1, c);
                n=n+n2;
                double toadd=(mu2-buffer._sum)*n2/n;
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
                quickSetValue(r, c, buffer._sum);
                quickSetValue(r+1, c, n);
                quickSetValue(r+2, c, buffer._correction);
            }
    }else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS)
    {
        //incremental mean: counts in column c+1, corrections in column c+2
        double n, n2, mu2;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen-2; c++)
            {
                buffer._sum=this.quickGetValue(r, c);
                n=this.quickGetValue(r, c+1);
                buffer._correction=this.quickGetValue(r, c+2);
                mu2=newWithCor.quickGetValue(r, c);
                n2=newWithCor.quickGetValue(r, c+1);
                n=n+n2;
                double toadd=(mu2-buffer._sum)*n2/n;
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
                quickSetValue(r, c, buffer._sum);
                quickSetValue(r, c+1, n);
                quickSetValue(r, c+2, buffer._correction);
            }
    }
    else
        throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}
/**
 * Applies a reorg operation (transpose, diag, or sort index) to this block.
 * Uses specialized library kernels where supported, otherwise a generic
 * cell-by-cell index transformation.
 *
 * @param op reorg operator (must wrap SwapIndex, DiagIndex, or SortIndex)
 * @param ret output matrix block to reuse, or null to allocate a new one
 * @param startRow unused by this implementation (interface requirement)
 * @param startColumn unused by this implementation (interface requirement)
 * @param length unused by this implementation (interface requirement)
 * @return result block with reorganized cells
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if the reorg function type is unsupported
 */
@Override
public MatrixValue reorgOperations(ReorgOperator op, MatrixValue ret, int startRow, int startColumn, int length)
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if ( !( op.fn instanceof SwapIndex || op.fn instanceof DiagIndex || op.fn instanceof SortIndex) )
        throw new DMLRuntimeException("the current reorgOperations cannot support: "+op.fn.getClass()+".");

    MatrixBlock result=checkType(ret);
    CellIndex tempCellIndex = new CellIndex(-1,-1);
    boolean reducedDim=op.fn.computeDimension(rlen, clen, tempCellIndex);
    boolean sps;
    if(reducedDim)
        sps = false;
    else if(op.fn.equals(DiagIndex.getDiagIndexFnObject()))
        sps = true;
    else
        sps = this.evalSparseFormatInMemory(true);

    if(result==null)
        result=new MatrixBlock(tempCellIndex.row, tempCellIndex.column, sps, this.nonZeros);
    else
        result.reset(tempCellIndex.row, tempCellIndex.column, sps, this.nonZeros);

    if( LibMatrixReorg.isSupportedReorgOperator(op) )
    {
        //SPECIAL case (operators with special performance requirements,
        //or size-dependent special behavior)
        //currently supported opcodes: r', rdiag, rsort
        LibMatrixReorg.reorg(this, result, op);
    }
    else
    {
        //GENERIC case (any reorg operator)
        CellIndex temp = new CellIndex(0, 0);
        if(sparse)
        {
            if(sparseRows!=null)
            {
                for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
                {
                    if(sparseRows[r]==null)
                        continue;
                    int[] cols=sparseRows[r].getIndexContainer();
                    double[] values=sparseRows[r].getValueContainer();
                    for(int i=0; i<sparseRows[r].size(); i++)
                    {
                        //map each input cell index to its output position
                        tempCellIndex.set(r, cols[i]);
                        op.fn.execute(tempCellIndex, temp);
                        result.appendValue(temp.row, temp.column, values[i]);
                    }
                }
            }
        }
        else
        {
            if( denseBlock != null )
            {
                if( result.isInSparseFormat() ) //SPARSE<-DENSE
                {
                    double[] a = denseBlock;

                    for( int i=0, aix=0; i<rlen; i++ )
                        for( int j=0; j<clen; j++, aix++ )
                        {
                            temp.set(i, j);
                            op.fn.execute(temp, temp);
                            result.appendValue(temp.row, temp.column, a[aix]);
                        }
                }
                else //DENSE<-DENSE
                {
                    result.allocateDenseBlock();
                    Arrays.fill(result.denseBlock, 0);
                    double[] a = denseBlock;
                    double[] c = result.denseBlock;
                    int n = result.clen;

                    for( int i=0, aix=0; i<rlen; i++ )
                        for( int j=0; j<clen; j++, aix++ )
                        {
                            temp.set(i, j);
                            op.fn.execute(temp, temp);
                            c[temp.row*n+temp.column] = a[aix];
                        }
                    result.nonZeros = nonZeros;
                }
            }
        }
    }

    return result;
}
/**
 * Horizontally appends (cbind) the given block to the right of this block
 * and returns the combined result.
 *
 * @param that block to append (same number of rows as this block)
 * @param ret output matrix block to reuse, or null to allocate a new one
 * @return combined block of size rlen x (clen + that.clen)
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 * @throws DMLRuntimeException if execution fails
 */
public MatrixBlock appendOperations( MatrixBlock that, MatrixBlock ret )
    throws DMLUnsupportedOperationException, DMLRuntimeException
{
    MatrixBlock result = checkType( ret );
    final int m = rlen;
    final int n = clen+that.clen;
    final long nnz = nonZeros+that.nonZeros;
    boolean sp = evalSparseFormatInMemory(m, n, nnz);

    //init result matrix
    if( result == null )
        result = new MatrixBlock(m, n, sp, nnz);
    else
        result.reset(m, n, sp, nnz);

    //core append operation
    //copy left and right input into output
    if( !result.sparse ) //DENSE
    {
        result.copy(0, m-1, 0, clen-1, this, false);
        result.copy(0, m-1, clen, n-1, that, false);
    }
    else //SPARSE
    {
        //adjust sparse rows if required
        if( !this.isEmptyBlock(false) || !that.isEmptyBlock(false) )
            result.allocateSparseRowsBlock();
        result.appendToSparse(this, 0, 0);
        result.appendToSparse(that, 0, clen);
    }
    result.nonZeros = nnz;

    return result;
}
/**
 * Computes a transpose-self matrix multiplication (tsmm): t(X)%*%X for
 * type LEFT, or X%*%t(X) for type RIGHT.
 *
 * @param out output matrix block
 * @param tstype transpose type (LEFT or RIGHT)
 * @return the output block
 * @throws DMLRuntimeException if tstype is not LEFT or RIGHT
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 */
public MatrixValue transposeSelfMatrixMultOperations( MatrixBlock out, MMTSJType tstype )
    throws DMLRuntimeException, DMLUnsupportedOperationException
{
    //check for transpose type
    if( !(tstype == MMTSJType.LEFT || tstype == MMTSJType.RIGHT) )
        throw new DMLRuntimeException("Invalid MMTSJ type '"+tstype.toString()+"'.");

    //compute matrix mult
    boolean leftTranspose = ( tstype == MMTSJType.LEFT );
    LibMatrixMult.matrixMultTransposeSelf(this, out, leftTranspose);

    return out;
}
/**
 * Computes a matrix multiplication chain (t(X)%*%(X%*%v) or
 * t(X)%*%(w*(X%*%v))) single-threaded. Convenience overload delegating to
 * the parallel version with k=1.
 *
 * @param v column vector operand
 * @param w weight column vector (may be null depending on chain type)
 * @param out output matrix block to reuse, or null to allocate a new one
 * @param ctype chain type (XtXv or XtwXv)
 * @return result column vector of size clen x 1
 * @throws DMLRuntimeException if inputs are invalid
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 */
public MatrixValue chainMatrixMultOperations( MatrixBlock v, MatrixBlock w, MatrixBlock out, ChainType ctype )
    throws DMLRuntimeException, DMLUnsupportedOperationException
{
    return chainMatrixMultOperations(v, w, out, ctype, 1);
}

/**
 * Computes a matrix multiplication chain (t(X)%*%(X%*%v) or
 * t(X)%*%(w*(X%*%v))) with k parallel threads.
 *
 * NOTE(review): v.getNumRows() is dereferenced before the subsequent
 * {@code v!=null} guard, so a null v would already fail with an NPE at the
 * dimension check — the null guard on v appears to be dead code; confirm
 * whether v is intended to be a required argument.
 *
 * @param v column vector operand
 * @param w weight column vector (may be null depending on chain type)
 * @param out output matrix block to reuse, or null to allocate a new one
 * @param ctype chain type (XtXv or XtwXv)
 * @param k degree of parallelism (k&gt;1 uses the multi-threaded kernel)
 * @return result column vector of size clen x 1
 * @throws DMLRuntimeException if inputs are invalid
 * @throws DMLUnsupportedOperationException if the operation is unsupported
 */
public MatrixValue chainMatrixMultOperations( MatrixBlock v, MatrixBlock w, MatrixBlock out, ChainType ctype, int k )
    throws DMLRuntimeException, DMLUnsupportedOperationException
{
    //check for transpose type
    if( !(ctype == ChainType.XtXv || ctype == ChainType.XtwXv) )
        throw new DMLRuntimeException("Invalid mmchain type '"+ctype.toString()+"'.");

    //check for matching dimensions
    if( this.getNumColumns() != v.getNumRows() )
        throw new DMLRuntimeException("Dimensions mismatch on mmchain operation ("+this.getNumColumns()+" != "+v.getNumRows()+")");
    if( v!=null && v.getNumColumns() != 1 )
        throw new DMLRuntimeException("Invalid input vector (column vector expected, but ncol="+v.getNumColumns()+")");
    if( w!=null && w.getNumColumns() != 1 )
        throw new DMLRuntimeException("Invalid weight vector (column vector expected, but ncol="+w.getNumColumns()+")");

    //prepare result
    if( out != null )
        out.reset(clen, 1, false);
    else
        out = new MatrixBlock(clen, 1, false);

    //compute matrix mult
    if( k > 1 )
        LibMatrixMult.matrixMultChain(this, v, w, out, ctype, k);
    else
        LibMatrixMult.matrixMultChain(this, v, w, out, ctype);

    return out;
}
/**
 * Computes a permutation matrix multiplication of this block (the permutation
 * matrix) with m2Val, writing into the two output blocks.
 *
 * @param m2Val right-hand side matrix (must have the same number of rows as this)
 * @param out1Val first output block
 * @param out2Val second output block
 * @throws DMLRuntimeException if the row dimensions do not match
 * @throws DMLUnsupportedOperationException
 */
public void permutationMatrixMultOperations( MatrixValue m2Val, MatrixValue out1Val, MatrixValue out2Val )
throws DMLRuntimeException, DMLUnsupportedOperationException
{
//check input types and dimensions
MatrixBlock m2 = checkType(m2Val);
MatrixBlock ret1 = checkType(out1Val);
MatrixBlock ret2 = checkType(out2Val);
//throw the declared DML exception type for consistency with the other
//operations (previously a raw RuntimeException)
if(this.rlen!=m2.rlen)
throw new DMLRuntimeException("Dimensions do not match for permutation matrix multiplication ("+this.rlen+"!="+m2.rlen+").");
//compute permutation matrix multiplication
LibMatrixMult.matrixMultPermute(this, m2, ret1, ret2);
}
/**
 * Method to perform leftIndexing operation for a given lower and upper bounds in row and column dimensions.
 * Updated matrix is returned as the output.
 *
 * Operations to be performed:
 * 1) result=this;
 * 2) result[rowLower:rowUpper, colLower:colUpper] = rhsMatrix;
 *
 * @param rhsMatrix source matrix written into the index range
 * @param rowLower row lower bound (1-based, inclusive)
 * @param rowUpper row upper bound (1-based, inclusive)
 * @param colLower column lower bound (1-based, inclusive)
 * @param colUpper column upper bound (1-based, inclusive)
 * @param ret optional pre-allocated result block (reset and reused if non-null)
 * @param inplace if true, this block itself is updated and returned; otherwise a copy is made first
 * @return the updated matrix block
 * @throws DMLRuntimeException if the bounds are out of range or smaller than the rhs dimensions
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue leftIndexingOperations(MatrixValue rhsMatrix, long rowLower, long rowUpper,
long colLower, long colUpper, MatrixValue ret, boolean inplace)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
// Check the validity of bounds
if ( rowLower < 1 || rowLower > getNumRows() || rowUpper < rowLower || rowUpper > getNumRows()
|| colLower < 1 || colUpper > getNumColumns() || colUpper < colLower || colUpper > getNumColumns() ) {
throw new DMLRuntimeException("Invalid values for matrix indexing: " +
"["+rowLower+":"+rowUpper+"," + colLower+":"+colUpper+"] " +
"must be within matrix dimensions ["+getNumRows()+","+getNumColumns()+"].");
}
//the target range must be large enough to hold the entire source matrix
if ( (rowUpper-rowLower+1) < rhsMatrix.getNumRows() || (colUpper-colLower+1) < rhsMatrix.getNumColumns()) {
throw new DMLRuntimeException("Invalid values for matrix indexing: " +
"dimensions of the source matrix ["+rhsMatrix.getNumRows()+"x" + rhsMatrix.getNumColumns() + "] " +
"do not match the shape of the matrix specified by indices [" +
rowLower +":" + rowUpper + ", " + colLower + ":" + colUpper + "].");
}
MatrixBlock result=checkType(ret);
//estimate output sparsity to choose a dense/sparse representation up front
boolean sp = estimateSparsityOnLeftIndexing(rlen, clen, nonZeros,
rhsMatrix.getNumRows(), rhsMatrix.getNumColumns(), rhsMatrix.getNonZeros());
if( !inplace ) //general case
{
if(result==null)
result=new MatrixBlock(rlen, clen, sp);
else
result.reset(rlen, clen, sp);
result.copy(this, sp);
}
else //update in-place
result = this;
//NOTE conceptually we could directly use a zeroout and copy(..., false) but
// since this was factors slower, we still use a full copy and subsequently
// copy(..., true) - however, this can be changed in the future once we
// improved the performance of zeroout.
//result = (MatrixBlockDSM) zeroOutOperations(result, new IndexRange(rowLower,rowUpper, colLower, colUpper ), false);
//convert 1-based inclusive bounds to 0-based array indexes
int rl = (int)rowLower-1;
int ru = (int)rowUpper-1;
int cl = (int)colLower-1;
int cu = (int)colUpper-1;
MatrixBlock src = (MatrixBlock)rhsMatrix;
if(rl==ru && cl==cu) //specific case: cell update
{
//copy single value and update nnz
result.quickSetValue(rl, cl, src.quickGetValue(0, 0));
}
else //general case
{
//copy submatrix into result
result.copy(rl, ru, cl, cu, src, true);
}
return result;
}
/**
 * Explicitly allow left indexing for scalars: result = this, then
 * result[row,col] = scalar.getDoubleValue().
 *
 * @param scalar scalar value to write
 * @param row target row (1-based)
 * @param col target column (1-based)
 * @param ret optional pre-allocated result block (reset and reused if non-null)
 * @param inplace if true, this block itself is updated and returned; otherwise a copy is made first
 * @return the updated matrix block
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue leftIndexingOperations(ScalarObject scalar, long row, long col, MatrixValue ret, boolean inplace)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
MatrixBlock result = checkType(ret);
double value = scalar.getDoubleValue();
//estimate output sparsity for a single-cell update
int nnzRhs = (value != 0) ? 1 : 0;
boolean sparseSafe = estimateSparsityOnLeftIndexing(rlen, clen, nonZeros, 1, 1, nnzRhs);
if( inplace ) {
//update in-place: write directly into this block
result = this;
}
else {
//general case: materialize a copy of this block first
if( result == null )
result = new MatrixBlock(rlen, clen, sparseSafe);
else
result.reset(rlen, clen, sparseSafe);
result.copy(this, sparseSafe);
}
//convert 1-based coordinates to 0-based indexes and set the cell
result.quickSetValue((int)row-1, (int)col-1, value);
return result;
}
/**
 * Method to perform rangeReIndex operation for a given lower and upper bounds in row and column dimensions.
 * Extracted submatrix is returned as "result".
 *
 * @param rowLower row lower bound (1-based, inclusive)
 * @param rowUpper row upper bound (1-based, inclusive)
 * @param colLower column lower bound (1-based, inclusive)
 * @param colUpper column upper bound (1-based, inclusive)
 * @param ret optional pre-allocated result block (reset and reused if non-null)
 * @return the extracted submatrix
 * @throws DMLRuntimeException if the bounds are outside the matrix dimensions
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue sliceOperations(long rowLower, long rowUpper, long colLower, long colUpper, MatrixValue ret)
throws DMLRuntimeException, DMLUnsupportedOperationException {
// check the validity of bounds
if ( rowLower < 1 || rowLower > getNumRows() || rowUpper < rowLower || rowUpper > getNumRows()
|| colLower < 1 || colUpper > getNumColumns() || colUpper < colLower || colUpper > getNumColumns() ) {
throw new DMLRuntimeException("Invalid values for matrix indexing: " +
"["+rowLower+":"+rowUpper+"," + colLower+":"+colUpper+"] " +
"must be within matrix dimensions ["+getNumRows()+","+getNumColumns()+"]");
}
//convert 1-based inclusive bounds to 0-based array indexes
int rl = (int)rowLower-1;
int ru = (int)rowUpper-1;
int cl = (int)colLower-1;
int cu = (int)colUpper-1;
//System.out.println(" -- performing slide on [" + getNumRows() + "x" + getNumColumns() + "] with ["+rl+":"+ru+","+cl+":"+cu+"].");
// Output matrix will have the same sparsity as that of the input matrix.
// (assuming a uniform distribution of non-zeros in the input)
MatrixBlock result=checkType(ret);
long estnnz= (long) ((double)this.nonZeros/rlen/clen*(ru-rl+1)*(cu-cl+1));
boolean result_sparsity = this.sparse && MatrixBlock.evalSparseFormatInMemory(ru-rl+1, cu-cl+1, estnnz);
if(result==null)
result=new MatrixBlock(ru-rl+1, cu-cl+1, result_sparsity, estnnz);
else
result.reset(ru-rl+1, cu-cl+1, result_sparsity, estnnz);
// actual slice operation
if( rowLower==1 && rowUpper==rlen && colLower==1 && colUpper==clen ) {
// copy if entire matrix required
result.copy( this );
}
else //general case
{
//core slicing operation (nnz maintained internally)
if (sparse)
sliceSparse(rl, ru, cl, cu, result);
else
sliceDense(rl, ru, cl, cu, result);
}
return result;
}
/**
 * Core sparse slice: copies the 0-based, inclusive range [rl,ru] x [cl,cu] of
 * this sparse block into dest, maintaining dest's nnz incrementally. Special
 * cases for column vectors (dense dest) and full row vectors (row append).
 *
 * @param rl row lower index (0-based, inclusive)
 * @param ru row upper index (0-based, inclusive)
 * @param cl column lower index (0-based, inclusive)
 * @param cu column upper index (0-based, inclusive)
 * @param dest target block, already reset to the output dimensions
 * @throws DMLRuntimeException
 */
private void sliceSparse(int rl, int ru, int cl, int cu, MatrixBlock dest)
throws DMLRuntimeException
{
//check for early abort
if( isEmptyBlock(false) )
return;
if( cl==cu ) //COLUMN VECTOR
{
//note: always dense dest
dest.allocateDenseBlock();
for( int i=rl; i<=ru; i++ ) {
SparseRow arow = sparseRows[i];
if( arow != null && !arow.isEmpty() ) {
double val = arow.get(cl);
if( val != 0 ) {
dest.denseBlock[i-rl] = val;
dest.nonZeros++;
}
}
}
}
else if( rl==ru && cl==0 && cu==clen-1 ) //ROW VECTOR
{
//note: always sparse dest, but also works for dense
dest.appendRow(0, sparseRows[rl]);
}
else //general case (sparse/dense dest)
{
for(int i=rl; i <= ru; i++)
if(sparseRows[i] != null && !sparseRows[i].isEmpty())
{
SparseRow arow = sparseRows[i];
int alen = arow.size();
int[] aix = arow.getIndexContainer();
double[] avals = arow.getValueContainer();
//find the first column index >= cl (-1 signals no such entry)
int astart = (cl>0)?arow.searchIndexesFirstGTE(cl):0;
if( astart != -1 )
for( int j=astart; j<alen && aix[j] <= cu; j++ )
dest.appendValue(i-rl, aix[j]-cl, avals[j]);
}
}
}
/**
 * Core dense slice: copies the 0-based, inclusive range [rl,ru] x [cl,cu] of
 * this dense block into dest; dest's nnz is recomputed at the end.
 *
 * @param rl row lower index (0-based, inclusive)
 * @param ru row upper index (0-based, inclusive)
 * @param cl column lower index (0-based, inclusive)
 * @param cu column upper index (0-based, inclusive)
 * @param dest target block, already reset to the output dimensions
 * @throws DMLRuntimeException
 */
private void sliceDense(int rl, int ru, int cl, int cu, MatrixBlock dest)
throws DMLRuntimeException
{
//ensure allocated input/output blocks
if( denseBlock == null )
return;
dest.allocateDenseBlock();
//indexing operation
if( cl==cu ) //COLUMN INDEXING
{
if( clen==1 ) //vector -> vector
{
System.arraycopy(denseBlock, rl, dest.denseBlock, 0, ru-rl+1);
}
else //matrix -> vector
{
//IBM JVM bug (JDK7) causes crash for certain cl/cu values (e.g., divide by zero for 4)
//for( int i=rl*clen+cl, ix=0; i<=ru*clen+cu; i+=clen, ix++ )
// dest.denseBlock[ix] = denseBlock[i];
//(the local copy of clen below is the workaround - keep it)
int len = clen;
for( int i=rl*len+cl, ix=0; i<=ru*len+cu; i+=len, ix++ )
dest.denseBlock[ix] = denseBlock[i];
}
}
else // GENERAL RANGE INDEXING
{
//IBM JVM bug (JDK7) causes crash for certain cl/cu values (e.g., divide by zero for 4)
//for(int i = rl, ix1 = rl*clen+cl, ix2=0; i <= ru; i++, ix1+=clen, ix2+=dest.clen)
// System.arraycopy(denseBlock, ix1, dest.denseBlock, ix2, dest.clen);
//(the local copies of clen/dest.clen below are the workaround - keep them)
int len1 = clen;
int len2 = dest.clen;
for(int i = rl, ix1 = rl*len1+cl, ix2=0; i <= ru; i++, ix1+=len1, ix2+=len2)
System.arraycopy(denseBlock, ix1, dest.denseBlock, ix2, len2);
}
//compute nnz of output (not maintained due to native calls)
dest.recomputeNonZeros();
}
/**
 * Slices this block by the given index range and distributes the pieces across
 * up to four output blocks (top-left/top-right/bottom-left/bottom-right),
 * split at the given row/column cut points.
 * NOTE(review): assumes outlist yields the target blocks in exactly the order
 * they are consumed here (topleft, topright, bottomleft, bottomright, with
 * absent quadrants skipped) - confirm against callers.
 *
 * @param outlist target blocks, consumed in quadrant order
 * @param range index range to slice (0-based, inclusive)
 * @param rowCut row split point
 * @param colCut column split point
 * @param normalBlockRowFactor regular block height
 * @param normalBlockColFactor regular block width
 * @param boundaryRlen height of a boundary (last) block
 * @param boundaryClen width of a boundary (last) block
 */
public void sliceOperations(ArrayList<IndexedMatrixValue> outlist, IndexRange range, int rowCut, int colCut,
int normalBlockRowFactor, int normalBlockColFactor, int boundaryRlen, int boundaryClen)
{
MatrixBlock topleft=null, topright=null, bottomleft=null, bottomright=null;
Iterator<IndexedMatrixValue> p=outlist.iterator();
//use boundary sizes when the cut lies beyond the requested range
int blockRowFactor=normalBlockRowFactor, blockColFactor=normalBlockColFactor;
if(rowCut>range.rowEnd)
blockRowFactor=boundaryRlen;
if(colCut>range.colEnd)
blockColFactor=boundaryClen;
int minrowcut=(int)Math.min(rowCut,range.rowEnd);
int mincolcut=(int)Math.min(colCut, range.colEnd);
int maxrowcut=(int)Math.max(rowCut, range.rowStart);
int maxcolcut=(int)Math.max(colCut, range.colStart);
//reset only the quadrants that actually intersect the range
if(range.rowStart<rowCut && range.colStart<colCut)
{
topleft=(MatrixBlock) p.next().getValue();
//topleft.reset(blockRowFactor, blockColFactor,
// checkSparcityOnSlide(rowCut-(int)range.rowStart, colCut-(int)range.colStart, blockRowFactor, blockColFactor));
topleft.reset(blockRowFactor, blockColFactor,
estimateSparsityOnSlice(minrowcut-(int)range.rowStart, mincolcut-(int)range.colStart, blockRowFactor, blockColFactor));
}
if(range.rowStart<rowCut && range.colEnd>=colCut)
{
topright=(MatrixBlock) p.next().getValue();
topright.reset(blockRowFactor, boundaryClen,
estimateSparsityOnSlice(minrowcut-(int)range.rowStart, (int)range.colEnd-maxcolcut+1, blockRowFactor, boundaryClen));
}
if(range.rowEnd>=rowCut && range.colStart<colCut)
{
bottomleft=(MatrixBlock) p.next().getValue();
bottomleft.reset(boundaryRlen, blockColFactor,
estimateSparsityOnSlice((int)range.rowEnd-maxrowcut+1, mincolcut-(int)range.colStart, boundaryRlen, blockColFactor));
}
if(range.rowEnd>=rowCut && range.colEnd>=colCut)
{
bottomright=(MatrixBlock) p.next().getValue();
bottomright.reset(boundaryRlen, boundaryClen,
estimateSparsityOnSlice((int)range.rowEnd-maxrowcut+1, (int)range.colEnd-maxcolcut+1, boundaryRlen, boundaryClen));
}
if(sparse)
{
if(sparseRows!=null)
{
//rows above the cut go to the top quadrants, remaining rows to the bottom
int r=(int)range.rowStart;
for(; r<Math.min(Math.min(rowCut, sparseRows.length), range.rowEnd+1); r++)
sliceHelp(r, range, colCut, topleft, topright, normalBlockRowFactor-rowCut, normalBlockRowFactor, normalBlockColFactor);
for(; r<=Math.min(range.rowEnd, sparseRows.length-1); r++)
sliceHelp(r, range, colCut, bottomleft, bottomright, -rowCut, normalBlockRowFactor, normalBlockColFactor);
//System.out.println("in: \n"+this);
//System.out.println("outlist: \n"+outlist);
}
}else
{
if(denseBlock!=null)
{
int i=((int)range.rowStart)*clen;
int r=(int) range.rowStart;
for(; r<Math.min(rowCut, range.rowEnd+1); r++)
{
int c=(int) range.colStart;
for(; c<Math.min(colCut, range.colEnd+1); c++)
topleft.appendValue(r+normalBlockRowFactor-rowCut, c+normalBlockColFactor-colCut, denseBlock[i+c]);
for(; c<=range.colEnd; c++)
topright.appendValue(r+normalBlockRowFactor-rowCut, c-colCut, denseBlock[i+c]);
i+=clen;
}
for(; r<=range.rowEnd; r++)
{
int c=(int) range.colStart;
for(; c<Math.min(colCut, range.colEnd+1); c++)
bottomleft.appendValue(r-rowCut, c+normalBlockColFactor-colCut, denseBlock[i+c]);
for(; c<=range.colEnd; c++)
bottomright.appendValue(r-rowCut, c-colCut, denseBlock[i+c]);
i+=clen;
}
}
}
}
/**
 * Helper for the sparse slice-and-split: distributes the entries of sparse
 * row r that fall inside the requested column range between the left and
 * right output blocks, splitting at column colCut.
 *
 * @param r row index into sparseRows
 * @param range index range being sliced
 * @param colCut column split point (entries &lt; colCut go left, others right)
 * @param left output block left of the cut
 * @param right output block right of the cut
 * @param rowOffset offset added to r to obtain the output row index
 * @param normalBlockRowFactor regular block height (not referenced in this body)
 * @param normalBlockColFactor regular block width (column offset for the left block)
 */
private void sliceHelp(int r, IndexRange range, int colCut, MatrixBlock left, MatrixBlock right, int rowOffset, int normalBlockRowFactor, int normalBlockColFactor)
{
if(sparseRows[r]==null)
return;
int[] cols=sparseRows[r].getIndexContainer();
double[] values=sparseRows[r].getValueContainer();
//first entry with column >= colStart (negative if none in range)
int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
if(start<0)
return;
//last entry with column <= colEnd (negative if none in range)
int end=sparseRows[r].searchIndexesFirstLTE((int)range.colEnd);
if(end<0 || start>end)
return;
//actual slice operation
for(int i=start; i<=end; i++) {
if(cols[i]<colCut)
left.appendValue(r+rowOffset, cols[i]+normalBlockColFactor-colCut, values[i]);
else
right.appendValue(r+rowOffset, cols[i]-colCut, values[i]);
}
}
/**
 * Appends matrix v2 column-wise to the right of this block and redistributes
 * the combined columns into the output blocks according to the blocking
 * factor. If this block is already a full-width block, both inputs are copied
 * unchanged; otherwise the columns of v2 are split between the (widened)
 * first output and an optional second output.
 *
 * @param v2 matrix appended to the right of this block
 * @param outlist target blocks, consumed in order (first, then optional second)
 * @param blockRowFactor regular block height
 * @param blockColFactor regular block width
 * @param m2IsLast true if v2 is the last block in its row of blocks
 * @param nextNCol number of columns of the block right of v2
 */
@Override
//This the append operations for MR side
//nextNCol is the number columns for the block right of block v2
public void appendOperations(MatrixValue v2,
ArrayList<IndexedMatrixValue> outlist, int blockRowFactor,
int blockColFactor, boolean m2IsLast, int nextNCol)
throws DMLUnsupportedOperationException, DMLRuntimeException {
MatrixBlock m2=(MatrixBlock)v2;
//System.out.println("second matrix: \n"+m2);
Iterator<IndexedMatrixValue> p=outlist.iterator();
if(this.clen==blockColFactor)
{
//this block is already full width: both inputs pass through unchanged
MatrixBlock first=(MatrixBlock) p.next().getValue();
first.copy(this);
MatrixBlock second=(MatrixBlock) p.next().getValue();
second.copy(m2);
}else
{
//this block is partial: fill it up to blockColFactor with the leading
//'part' columns of m2 and spill the rest into the second output
int ncol=Math.min(clen+m2.getNumColumns(), blockColFactor);
int part=ncol-clen;
MatrixBlock first=(MatrixBlock) p.next().getValue();
first.reset(rlen, ncol, this.nonZeros+m2.getNonZeros()*part/m2.getNumColumns());
//copy the first matrix
if(this.sparse)
{
if(this.sparseRows!=null)
{
for(int i=0; i<Math.min(rlen, this.sparseRows.length); i++)
{
if(this.sparseRows[i]!=null)
first.appendRow(i, this.sparseRows[i]);
}
}
}else if(this.denseBlock!=null)
{
int sindx=0;
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
first.appendValue(r, c, this.denseBlock[sindx]);
sindx++;
}
}
MatrixBlock second=null;
//a second output exists only if m2 does not fully fit into 'first'
if(part<m2.clen)
{
second=(MatrixBlock) p.next().getValue();
if(m2IsLast)
second.reset(m2.rlen, m2.clen-part, m2.sparse);
else
second.reset(m2.rlen, Math.min(m2.clen-part+nextNCol, blockColFactor), m2.sparse);
}
//copy the second
if(m2.sparse)
{
if(m2.sparseRows!=null)
{
for(int i=0; i<Math.min(m2.rlen, m2.sparseRows.length); i++)
{
if(m2.sparseRows[i]!=null)
{
int[] indexContainer=m2.sparseRows[i].getIndexContainer();
double[] valueContainer=m2.sparseRows[i].getValueContainer();
for(int j=0; j<m2.sparseRows[i].size(); j++)
{
if(indexContainer[j]<part)
first.appendValue(i, clen+indexContainer[j], valueContainer[j]);
else
second.appendValue(i, indexContainer[j]-part, valueContainer[j]);
}
}
}
}
}else if(m2.denseBlock!=null)
{
int sindx=0;
for(int r=0; r<m2.rlen; r++)
{
int c=0;
for(; c<part; c++)
{
first.appendValue(r, clen+c, m2.denseBlock[sindx+c]);
// System.out.println("access "+(sindx+c));
// System.out.println("add first ("+r+", "+(clen+c)+"), "+m2.denseBlock[sindx+c]);
}
for(; c<m2.clen; c++)
{
second.appendValue(r, c-part, m2.denseBlock[sindx+c]);
// System.out.println("access "+(sindx+c));
// System.out.println("add second ("+r+", "+(c-part)+"), "+m2.denseBlock[sindx+c]);
}
sindx+=m2.clen;
}
}
}
}
/**
 * Zeroes out the given index range of this matrix (complementary=false), or
 * selects only that range and zeroes everything else (complementary=true),
 * writing the outcome into result.
 *
 * @param result optional pre-allocated result block (reset and reused if non-null)
 * @param range index range (0-based, inclusive) to zero out or select
 * @param complementary if true, keep only the range; if false, zero the range
 * @return the result block
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
public MatrixValue zeroOutOperations(MatrixValue result, IndexRange range, boolean complementary)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
checkType(result);
//estimate output sparsity from the fraction of cells kept/removed
double currentSparsity=(double)nonZeros/(double)rlen/(double)clen;
double estimatedSps=currentSparsity*(double)(range.rowEnd-range.rowStart+1)
*(double)(range.colEnd-range.colStart+1)/(double)rlen/(double)clen;
if(!complementary)
estimatedSps=currentSparsity-estimatedSps;
boolean lsparse = evalSparseFormatInMemory(rlen, clen, (long)(estimatedSps*rlen*clen));
if(result==null)
result=new MatrixBlock(rlen, clen, lsparse, (int)(estimatedSps*rlen*clen));
else
result.reset(rlen, clen, lsparse, (int)(estimatedSps*rlen*clen));
if(sparse)
{
if(sparseRows!=null)
{
if(!complementary)//if zero out
{
//rows entirely outside the range are copied as-is
for(int r=0; r<Math.min((int)range.rowStart, sparseRows.length); r++)
((MatrixBlock) result).appendRow(r, sparseRows[r]);
for(int r=Math.min((int)range.rowEnd+1, sparseRows.length); r<Math.min(rlen, sparseRows.length); r++)
((MatrixBlock) result).appendRow(r, sparseRows[r]);
}
for(int r=(int)range.rowStart; r<=Math.min(range.rowEnd, sparseRows.length-1); r++)
{
if(sparseRows[r]==null)
continue;
int[] cols=sparseRows[r].getIndexContainer();
double[] values=sparseRows[r].getValueContainer();
if(complementary)//if selection
{
//keep only the entries inside [colStart,colEnd]
int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
if(start<0) continue;
int end=sparseRows[r].searchIndexesFirstGT((int)range.colEnd);
if(end<0 || start>end)
continue;
for(int i=start; i<end; i++)
{
((MatrixBlock) result).appendValue(r, cols[i], values[i]);
}
}else
{
//keep only the entries outside [colStart,colEnd]
int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
if(start<0) start=sparseRows[r].size();
int end=sparseRows[r].searchIndexesFirstGT((int)range.colEnd);
if(end<0) end=sparseRows[r].size();
for(int i=0; i<start; i++)
{
((MatrixBlock) result).appendValue(r, cols[i], values[i]);
}
for(int i=end; i<sparseRows[r].size(); i++)
{
((MatrixBlock) result).appendValue(r, cols[i], values[i]);
}
}
}
}
}else
{
if(denseBlock!=null)
{
if(complementary)//if selection
{
int offset=((int)range.rowStart)*clen;
for(int r=(int) range.rowStart; r<=range.rowEnd; r++)
{
for(int c=(int) range.colStart; c<=range.colEnd; c++)
((MatrixBlock) result).appendValue(r, c, denseBlock[offset+c]);
offset+=clen;
}
}else
{
//copy all cells except those inside the range
int offset=0;
int r=0;
for(; r<(int)range.rowStart; r++)
for(int c=0; c<clen; c++, offset++)
((MatrixBlock) result).appendValue(r, c, denseBlock[offset]);
for(; r<=(int)range.rowEnd; r++)
{
for(int c=0; c<(int)range.colStart; c++)
((MatrixBlock) result).appendValue(r, c, denseBlock[offset+c]);
for(int c=(int)range.colEnd+1; c<clen; c++)
((MatrixBlock) result).appendValue(r, c, denseBlock[offset+c]);
offset+=clen;
}
for(; r<rlen; r++)
for(int c=0; c<clen; c++, offset++)
((MatrixBlock) result).appendValue(r, c, denseBlock[offset]);
}
}
}
return result;
}
/**
 * Computes a unary aggregate of this block; convenience overload that
 * forwards to the full variant with inCP=false (correction rows/columns,
 * if any, are kept in the output).
 *
 * @param op unary aggregate operator
 * @param result optional pre-allocated result block
 * @param blockingFactorRow block height
 * @param blockingFactorCol block width
 * @param indexesIn block indexes of this block
 * @return the aggregated result block
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op, MatrixValue result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//inCP=false: do not drop correction rows/columns from the output
final boolean inCP = false;
return aggregateUnaryOperations(op, result, blockingFactorRow, blockingFactorCol, indexesIn, inCP);
}
/**
 * Computes a unary aggregate of this block. The output dimension is derived
 * from the operator's index function and extended by extra rows/columns if
 * the aggregate carries a correction (e.g., Kahan-style carry values).
 *
 * @param op unary aggregate operator
 * @param result optional pre-allocated result block (reset and reused if non-null)
 * @param blockingFactorRow block height (used to recompute global indexes)
 * @param blockingFactorCol block width (used to recompute global indexes)
 * @param indexesIn block indexes of this block within the global matrix
 * @param inCP if true, correction rows/columns are dropped from the output
 * @return the aggregated result block
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op, MatrixValue result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn, boolean inCP)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//derive output dimensions from the operator's index function
CellIndex tempCellIndex = new CellIndex(-1,-1);
op.indexFn.computeDimension(rlen, clen, tempCellIndex);
//extend the output by the correction rows/columns required by the operator
if(op.aggOp.correctionExists)
{
switch(op.aggOp.correctionLocation)
{
case LASTROW:
tempCellIndex.row++;
break;
case LASTCOLUMN:
tempCellIndex.column++;
break;
case LASTTWOROWS:
tempCellIndex.row+=2;
break;
case LASTTWOCOLUMNS:
tempCellIndex.column+=2;
break;
default:
throw new DMLRuntimeException("unrecognized correctionLocation: "+op.aggOp.correctionLocation);
}
}
if(result==null)
result=new MatrixBlock(tempCellIndex.row, tempCellIndex.column, false);
else
result.reset(tempCellIndex.row, tempCellIndex.column, false);
MatrixBlock ret = (MatrixBlock) result;
//prefer the optimized library kernel where supported
if( LibMatrixAgg.isSupportedUnaryAggregateOperator(op) ) {
LibMatrixAgg.aggregateUnaryMatrix(this, ret, op);
LibMatrixAgg.recomputeIndexes(ret, op, blockingFactorRow, blockingFactorCol, indexesIn);
}
else if(op.sparseSafe)
sparseAggregateUnaryHelp(op, ret, blockingFactorRow, blockingFactorCol, indexesIn);
else
denseAggregateUnaryHelp(op, ret, blockingFactorRow, blockingFactorCol, indexesIn);
//in CP the correction rows/columns are an implementation detail - drop them
if(op.aggOp.correctionExists && inCP)
((MatrixBlock)result).dropLastRowsOrColums(op.aggOp.correctionLocation);
return ret;
}
/**
 * Fallback aggregation for sparse-safe operators: in the sparse case only
 * nonzero entries are visited; in the dense case all cells are scanned.
 * Each value is folded into the result via incrementalAggregateUnaryHelp.
 *
 * @param op unary aggregate operator (sparse-safe)
 * @param result pre-sized output block
 * @param blockingFactorRow block height (not referenced in this body)
 * @param blockingFactorCol block width (not referenced in this body)
 * @param indexesIn block indexes (not referenced in this body)
 * @throws DMLRuntimeException
 */
private void sparseAggregateUnaryHelp(AggregateUnaryOperator op, MatrixBlock result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLRuntimeException
{
//initialize result
if(op.aggOp.initialValue!=0)
result.resetDenseWithValue(result.rlen, result.clen, op.aggOp.initialValue);
CellIndex tempCellIndex = new CellIndex(-1,-1);
KahanObject buffer=new KahanObject(0,0);
int r = 0, c = 0;
if(sparse)
{
if(sparseRows!=null)
{
for(r=0; r<Math.min(rlen, sparseRows.length); r++)
{
if(sparseRows[r]==null)
continue;
int[] cols=sparseRows[r].getIndexContainer();
double[] values=sparseRows[r].getValueContainer();
for(int i=0; i<sparseRows[r].size(); i++)
{
//map the cell index to its output position before aggregating
tempCellIndex.set(r, cols[i]);
op.indexFn.execute(tempCellIndex, tempCellIndex);
incrementalAggregateUnaryHelp(op.aggOp, result, tempCellIndex.row, tempCellIndex.column, values[i], buffer);
}
}
}
}
else
{
if(denseBlock!=null)
{
int limit=rlen*clen;
for(int i=0; i<limit; i++)
{
//recover (r,c) from the linearized dense index
r=i/clen;
c=i%clen;
tempCellIndex.set(r, c);
op.indexFn.execute(tempCellIndex, tempCellIndex);
incrementalAggregateUnaryHelp(op.aggOp, result, tempCellIndex.row, tempCellIndex.column, denseBlock[i], buffer);
}
}
}
}
/**
 * Fallback aggregation for non-sparse-safe operators: scans every cell of
 * this block via quickGetValue. MAXINDEX/MININDEX builtins are special-cased
 * because they must track the global column index of the extreme value in
 * addition to the value itself.
 *
 * @param op unary aggregate operator (not sparse-safe)
 * @param result pre-sized output block
 * @param blockingFactorRow block height (used only by the index-builtin path)
 * @param blockingFactorCol block width (used for the global index calculation)
 * @param indexesIn block indexes of this block within the global matrix
 * @throws DMLRuntimeException
 */
private void denseAggregateUnaryHelp(AggregateUnaryOperator op, MatrixBlock result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLRuntimeException
{
//initialize
if(op.aggOp.initialValue!=0)
result.resetDenseWithValue(result.rlen, result.clen, op.aggOp.initialValue);
CellIndex tempCellIndex = new CellIndex(-1,-1);
KahanObject buffer=new KahanObject(0,0);
for(int i=0; i<rlen; i++)
for(int j=0; j<clen; j++)
{
tempCellIndex.set(i, j);
op.indexFn.execute(tempCellIndex, tempCellIndex);
if(op.aggOp.correctionExists
&& op.aggOp.correctionLocation == CorrectionLocationType.LASTCOLUMN
&& op.aggOp.increOp.fn instanceof Builtin
&& ( ((Builtin)(op.aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MAXINDEX
|| ((Builtin)(op.aggOp.increOp.fn)).bFunc == Builtin.BuiltinFunctionCode.MININDEX)
){
//index builtins: column 0 holds the global index, column 1 the value
double currMaxValue = result.quickGetValue(i, 1);
long newMaxIndex = UtilFunctions.cellIndexCalculation(indexesIn.getColumnIndex(), blockingFactorCol, j);
double newMaxValue = quickGetValue(i, j);
double update = op.aggOp.increOp.fn.execute(newMaxValue, currMaxValue);
//System.out.println("currV="+currMaxValue+",newV="+newMaxValue+",newIX="+newMaxIndex+",update="+update);
if(update == 1){
result.quickSetValue(i, 0, newMaxIndex);
result.quickSetValue(i, 1, newMaxValue);
}
}else
incrementalAggregateUnaryHelp(op.aggOp, result, tempCellIndex.row, tempCellIndex.column, quickGetValue(i,j), buffer);
}
}
/**
 * Folds a single value into the result cell (row,column), maintaining the
 * operator's correction cells as required:
 * - LASTROW/LASTCOLUMN: value + one Kahan-style correction cell
 * - LASTTWOROWS/LASTTWOCOLUMNS (mean): value + count cell + correction cell
 * - no correction: plain accumulation via the increment function
 *
 * @param aggOp aggregate operator providing the increment function
 * @param result output block holding value and correction cells
 * @param row output row of the aggregated value
 * @param column output column of the aggregated value
 * @param newvalue value to fold in
 * @param buffer reusable Kahan buffer (sum + correction)
 * @throws DMLRuntimeException if the correction location is unrecognized
 */
private void incrementalAggregateUnaryHelp(AggregateOperator aggOp, MatrixBlock result, int row, int column,
double newvalue, KahanObject buffer) throws DMLRuntimeException
{
if(aggOp.correctionExists)
{
if(aggOp.correctionLocation==CorrectionLocationType.LASTROW || aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
{
//the correction lives one row/column past the value cell
int corRow=row, corCol=column;
if(aggOp.correctionLocation==CorrectionLocationType.LASTROW)//extra row
corRow++;
else if(aggOp.correctionLocation==CorrectionLocationType.LASTCOLUMN)
corCol++;
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
buffer._sum=result.quickGetValue(row, column);
buffer._correction=result.quickGetValue(corRow, corCol);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newvalue);
result.quickSetValue(row, column, buffer._sum);
result.quickSetValue(corRow, corCol, buffer._correction);
}else if(aggOp.correctionLocation==CorrectionLocationType.NONE)
{
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}else// for mean
{
//mean keeps a count cell one step and a correction cell two steps
//past the value cell
int corRow=row, corCol=column;
int countRow=row, countCol=column;
if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOROWS)
{
countRow++;
corRow+=2;
}
else if(aggOp.correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS)
{
countCol++;
corCol+=2;
}
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
buffer._sum=result.quickGetValue(row, column);
buffer._correction=result.quickGetValue(corRow, corCol);
double count=result.quickGetValue(countRow, countCol)+1.0;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newvalue, count);
result.quickSetValue(row, column, buffer._sum);
result.quickSetValue(corRow, corCol, buffer._correction);
result.quickSetValue(countRow, countCol, count);
}
}else
{
newvalue=aggOp.increOp.fn.execute(result.quickGetValue(row, column), newvalue);
result.quickSetValue(row, column, newvalue);
}
}
/**
 * Drops the correction rows/columns (one or two, depending on the correction
 * type) from this block in place, shrinking rlen/clen and adjusting the
 * nonZeros count accordingly. No-op for NONE/INVALID locations.
 *
 * @param correctionLocation location of the correction rows/columns to drop
 */
public void dropLastRowsOrColums(CorrectionLocationType correctionLocation)
{
//do nothing
if( correctionLocation==CorrectionLocationType.NONE
|| correctionLocation==CorrectionLocationType.INVALID )
{
return;
}
//determine number of rows/cols to be removed
int step = ( correctionLocation==CorrectionLocationType.LASTTWOROWS
|| correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS) ? 2 : 1;
//e.g., colSums, colMeans, colMaxs, colMeans
if( correctionLocation==CorrectionLocationType.LASTROW
|| correctionLocation==CorrectionLocationType.LASTTWOROWS )
{
if( sparse ) //SPARSE
{
//subtract the nnz of the dropped rows
if(sparseRows!=null)
for(int i=1; i<=step; i++)
if(sparseRows[rlen-i]!=null)
this.nonZeros-=sparseRows[rlen-i].size();
}
else //DENSE
{
//subtract the nnz of the dropped trailing cells
if(denseBlock!=null)
for(int i=(rlen-step)*clen; i<rlen*clen; i++)
if(denseBlock[i]!=0)
this.nonZeros--;
}
//just need to shrink the dimension, the deleted rows won't be accessed
rlen -= step;
}
//e.g., rowSums, rowsMeans, rowsMaxs, rowsMeans
if( correctionLocation==CorrectionLocationType.LASTCOLUMN
|| correctionLocation==CorrectionLocationType.LASTTWOCOLUMNS )
{
if(sparse) //SPARSE
{
if(sparseRows!=null)
{
//truncate each sparse row at the first dropped column
for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
if(sparseRows[r]!=null)
{
int newSize=sparseRows[r].searchIndexesFirstGTE(clen-step);
if(newSize>=0)
{
this.nonZeros-=sparseRows[r].size()-newSize;
sparseRows[r].truncate(newSize);
}
}
}
}
else //DENSE
{
if(this.denseBlock!=null)
{
//compact the dense block row by row, recounting nnz on the fly
//the first row doesn't need to be copied
int targetIndex=clen-step;
int sourceOffset=clen;
this.nonZeros=0;
for(int i=0; i<targetIndex; i++)
if(denseBlock[i]!=0)
this.nonZeros++;
//start from the 2nd row
for(int r=1; r<rlen; r++)
{
for(int c=0; c<clen-step; c++)
{
if((denseBlock[targetIndex]=denseBlock[sourceOffset+c])!=0)
this.nonZeros++;
targetIndex++;
}
sourceOffset+=clen;
}
}
}
clen -= step;
}
}
/**
 * Computes a central moment (as defined by the given CM operator) over this
 * 1-column matrix. In the sparse case only nonzeros are visited and all
 * zeros are accounted for in a single weighted call at the end.
 *
 * @param op central moment operator
 * @return accumulated CM_COV_Object
 * @throws DMLRuntimeException if this block is not a column vector
 */
public CM_COV_Object cmOperations(CMOperator op)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1) {
throw new DMLRuntimeException("Central Moment can not be computed on ["
+ this.getNumRows() + "," + this.getNumColumns() + "] matrix.");
}
CM_COV_Object cmobj = new CM_COV_Object();
int nzcount = 0;
if(sparse && sparseRows!=null) //SPARSE
{
for(int r=0; r<Math.min(rlen, sparseRows.length); r++)
{
if(sparseRows[r]==null)
continue;
double[] values=sparseRows[r].getValueContainer();
for(int i=0; i<sparseRows[r].size(); i++) {
op.fn.execute(cmobj, values[i]);
nzcount++;
}
}
// account for zeros in the vector
op.fn.execute(cmobj, 0.0, this.getNumRows()-nzcount);
}
else if(denseBlock!=null) //DENSE
{
//always vector (see check above)
for(int i=0; i<rlen; i++)
op.fn.execute(cmobj, denseBlock[i]);
}
return cmobj;
}
/**
 * Computes a weighted central moment (as defined by the given CM operator)
 * over this 1-column matrix with the given 1-column weight matrix.
 *
 * @param op central moment operator
 * @param weights column vector of weights, same dimensions as this block
 * @return accumulated CM_COV_Object
 * @throws DMLRuntimeException if either input is not a column vector or the
 *         dimensions do not match
 */
public CM_COV_Object cmOperations(CMOperator op, MatrixBlock weights)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1 || weights.getNumColumns() != 1) {
throw new DMLRuntimeException("Central Moment can be computed only on 1-dimensional column matrices.");
}
//fixed error message: previously reported "Covariance:" although this is
//the central moment operation (copy-paste from covOperations)
if ( this.getNumRows() != weights.getNumRows() || this.getNumColumns() != weights.getNumColumns()) {
throw new DMLRuntimeException("Central Moment: Mismatching dimensions between input and weight matrices - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ weights.getNumRows() + "," + weights.getNumColumns() +"]");
}
CM_COV_Object cmobj = new CM_COV_Object();
if (sparse && sparseRows!=null) //SPARSE
{
for(int i=0; i < rlen; i++)
op.fn.execute(cmobj, this.quickGetValue(i,0), weights.quickGetValue(i,0));
}
else if(denseBlock!=null) //DENSE
{
//always vectors (see check above)
if( !weights.sparse )
{
//both dense vectors (default case)
if(weights.denseBlock!=null)
for( int i=0; i<rlen; i++ )
op.fn.execute(cmobj, denseBlock[i], weights.denseBlock[i]);
}
else
{
for(int i=0; i<rlen; i++)
op.fn.execute(cmobj, denseBlock[i], weights.quickGetValue(i,0) );
}
}
return cmobj;
}
/**
 * Computes the covariance (as defined by the given COV operator) between this
 * 1-column matrix and another 1-column matrix of the same dimensions.
 *
 * @param op covariance operator
 * @param that second column vector, same dimensions as this block
 * @return accumulated CM_COV_Object
 * @throws DMLRuntimeException if either input is not a column vector or the
 *         dimensions do not match
 */
public CM_COV_Object covOperations(COVOperator op, MatrixBlock that)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1 || that.getNumColumns() != 1 ) {
throw new DMLRuntimeException("Covariance can be computed only on 1-dimensional column matrices.");
}
if ( this.getNumRows() != that.getNumRows() || this.getNumColumns() != that.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching input matrix dimensions - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ that.getNumRows() + "," + that.getNumColumns() +"]");
}
CM_COV_Object covobj = new CM_COV_Object();
if(sparse && sparseRows!=null) //SPARSE
{
for(int i=0; i < rlen; i++ )
op.fn.execute(covobj, this.quickGetValue(i,0), that.quickGetValue(i,0));
}
else if(denseBlock!=null) //DENSE
{
//always vectors (see check above)
if( !that.sparse )
{
//both dense vectors (default case)
if(that.denseBlock!=null)
for( int i=0; i<rlen; i++ )
op.fn.execute(covobj, denseBlock[i], that.denseBlock[i]);
}
else
{
for(int i=0; i<rlen; i++)
op.fn.execute(covobj, denseBlock[i], that.quickGetValue(i,0));
}
}
return covobj;
}
/**
 * Computes the weighted covariance (as defined by the given COV operator)
 * between this 1-column matrix and another 1-column matrix, using the given
 * 1-column weight matrix.
 *
 * @param op covariance operator
 * @param that second column vector, same dimensions as this block
 * @param weights column vector of weights, same dimensions as this block
 * @return accumulated CM_COV_Object
 * @throws DMLRuntimeException if any input is not a column vector or the
 *         dimensions do not match
 */
public CM_COV_Object covOperations(COVOperator op, MatrixBlock that, MatrixBlock weights)
throws DMLRuntimeException
{
/* this._data must be a 1 dimensional vector */
if ( this.getNumColumns() != 1 || that.getNumColumns() != 1 || weights.getNumColumns() != 1) {
throw new DMLRuntimeException("Covariance can be computed only on 1-dimensional column matrices.");
}
if ( this.getNumRows() != that.getNumRows() || this.getNumColumns() != that.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching input matrix dimensions - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ that.getNumRows() + "," + that.getNumColumns() +"]");
}
if ( this.getNumRows() != weights.getNumRows() || this.getNumColumns() != weights.getNumColumns()) {
throw new DMLRuntimeException("Covariance: Mismatching dimensions between input and weight matrices - " +
"["+this.getNumRows()+","+this.getNumColumns() +"] != ["
+ weights.getNumRows() + "," + weights.getNumColumns() +"]");
}
CM_COV_Object covobj = new CM_COV_Object();
if(sparse && sparseRows!=null) //SPARSE
{
for(int i=0; i < rlen; i++ )
op.fn.execute(covobj, this.quickGetValue(i,0), that.quickGetValue(i,0), weights.quickGetValue(i,0));
}
else if(denseBlock!=null) //DENSE
{
//always vectors (see check above)
if( !that.sparse && !weights.sparse )
{
//all dense vectors (default case)
if(that.denseBlock!=null)
for( int i=0; i<rlen; i++ )
op.fn.execute(covobj, denseBlock[i], that.denseBlock[i], weights.denseBlock[i]);
}
else
{
for(int i=0; i<rlen; i++)
op.fn.execute(covobj, denseBlock[i], that.quickGetValue(i,0), weights.quickGetValue(i,0));
}
}
return covobj;
}
/**
 * Produces a two-column matrix of (value, weight) pairs sorted by value in
 * ascending order. Row 0 aggregates all zeros of the input together with
 * their total weight (or the zero count if no weights are given). Input must
 * be a column vector; weights, if given, must be a column vector too.
 *
 * @param weights optional column vector of weights (may be null)
 * @param ret optional pre-allocated result block (reset and reused if non-null)
 * @return two-column (value, weight) matrix sorted by value
 * @throws DMLRuntimeException if inputs are not column vectors
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue sortOperations(MatrixValue weights, MatrixValue result) throws DMLRuntimeException, DMLUnsupportedOperationException {
boolean wtflag = (weights!=null);
MatrixBlock wts= (weights == null ? null : checkType(weights));
checkType(result);
if ( getNumColumns() != 1 ) {
throw new DMLRuntimeException("Invalid input dimensions (" + getNumRows() + "x" + getNumColumns() + ") to sort operation.");
}
if ( wts != null && wts.getNumColumns() != 1 ) {
throw new DMLRuntimeException("Invalid weight dimensions (" + wts.getNumRows() + "x" + wts.getNumColumns() + ") to sort operation.");
}
// prepare result, currently always dense
// #rows in temp matrix = 1 + #nnz in the input ( 1 is for the "zero" value)
int dim1 = (int) (1+this.getNonZeros());
if(result==null)
result=new MatrixBlock(dim1, 2, false);
else
result.reset(dim1, 2, false);
// Copy the input elements into a temporary array for sorting
// First column is data and second column is weights
// (since the inputs are vectors, they are likely dense - hence quickget is sufficient)
MatrixBlock tdw = new MatrixBlock(dim1, 2, false);
double d, w, zero_wt=0;
int ind = 1;
if( wtflag ) // w/ weights
{
for ( int i=0; i<rlen; i++ ) {
d = quickGetValue(i,0);
w = wts.quickGetValue(i,0);
if ( d != 0 ) {
tdw.quickSetValue(ind, 0, d);
tdw.quickSetValue(ind, 1, w);
ind++;
}
else
//zeros are not materialized individually; collect their total weight
zero_wt += w;
}
}
else //w/o weights
{
//without weights each nonzero has weight 1 and the zero weight is the zero count
zero_wt = getNumRows() - getNonZeros();
for( int i=0; i<rlen; i++ ) {
d = quickGetValue(i,0);
if( d != 0 ){
tdw.quickSetValue(ind, 0, d);
tdw.quickSetValue(ind, 1, 1);
ind++;
}
}
}
tdw.quickSetValue(0, 0, 0.0);
tdw.quickSetValue(0, 1, zero_wt); //num zeros in input
// Sort td and tw based on values inside td (ascending sort), incl copy into result
SortIndex sfn = SortIndex.getSortIndexFnObject(1, false, false);
ReorgOperator rop = new ReorgOperator(sfn);
LibMatrixReorg.reorg(tdw, (MatrixBlock)result, rop);
return result;
}
/**
 * Computes the weighted inter-quartile mean over this two-column
 * (value, weight) matrix. Assumes the layout produced by sortOperations:
 * values ascending in column 0, integral weights in column 1 — TODO confirm
 * callers always pass sorted input. Fractional boundary weights at the
 * 25% and 75% positions are included proportionally.
 *
 * @return inter-quartile mean
 * @throws DMLRuntimeException if weights are non-integral
 */
public double interQuartileMean() throws DMLRuntimeException {
double sum_wt = sumWeightForQuantile();
double q25d = 0.25*sum_wt;
double q75d = 0.75*sum_wt;
//integer positions of the quartile boundaries (ceil to next full weight)
int q25i = (int) Math.ceil(q25d);
int q75i = (int) Math.ceil(q75d);
// skip until (but excluding) q25
int t = 0, i=-1;
while(i<getNumRows() && t < q25i) {
i++;
//System.out.println(" " + i + ": " + quickGetValue(i,0) + "," + quickGetValue(i,1));
t += quickGetValue(i,1);
}
// compute the portion of q25
double runningSum = (t-q25d)*quickGetValue(i,0);
// add until (including) q75
while(i<getNumRows() && t < q75i) {
i++;
t += quickGetValue(i,1);
runningSum += quickGetValue(i,0)*quickGetValue(i,1);
}
// subtract additional portion of q75
runningSum -= (t-q75d)*quickGetValue(i,0);
//the inter-quartile range covers half of the total weight
return runningSum/(sum_wt*0.5);
}
/**
 * Computes the weighted interQuartileMean.
 * The matrix block ("this" pointer) has two columns, in which the first column
 * refers to the data and second column denotes corresponding weights.
 *
 * NOTE(review): appears to be an older implementation superseded by
 * interQuartileMean() — confirm remaining callers before removal.
 *
 * @return InterQuartileMean
 * @throws DMLRuntimeException
 */
public double interQuartileMeanOLD() throws DMLRuntimeException {
double sum_wt = sumWeightForQuantile();
int fromPos = (int) Math.ceil(0.25*sum_wt);
int toPos = (int) Math.ceil(0.75*sum_wt);
int selectRange = toPos-fromPos; // range: (fromPos,toPos]
if ( selectRange == 0 )
return 0.0;
int index, count=0;
// The first row (0^th row) has value 0.
// If it has a non-zero weight i.e., input data has zero values
// then "index" must start from 0, otherwise we skip the first row
// and start with the next value in the data, which is in the 1st row.
if ( quickGetValue(0,1) > 0 )
index = 0;
else
index = 1;
// keep scanning the weights, until we hit the required position <code>fromPos</code>
while ( count < fromPos ) {
count += quickGetValue(index,1);
++index;
}
double runningSum;
double val;
int wt, selectedCount;
//partial weight of the entry straddling the 25% boundary
runningSum = (count-fromPos) * quickGetValue(index-1, 0);
selectedCount = (count-fromPos);
while(count <= toPos ) {
val = quickGetValue(index,0);
wt = (int) quickGetValue(index,1);
//clamp the last entry's weight so we never select more than the range
runningSum += (val * Math.min(wt, selectRange-selectedCount));
selectedCount += Math.min(wt, selectRange-selectedCount);
count += wt;
++index;
}
//System.out.println(fromPos + ", " + toPos + ": " + count + ", "+ runningSum + ", " + selectedCount);
return runningSum/selectedCount;
}
/**
 * Evaluates this (sorted value/weight) matrix at each quantile position
 * given in the 1-column input, producing one picked value per row.
 *
 * @param quantiles column vector of quantile positions
 * @param ret       output block (reused if non-null; created dense otherwise)
 * @return column vector of picked values, row-aligned with the quantiles
 * @throws DMLRuntimeException if quantiles is not a column vector
 */
public MatrixValue pickValues(MatrixValue quantiles, MatrixValue ret)
	throws DMLUnsupportedOperationException, DMLRuntimeException 
{
	MatrixBlock qs = checkType(quantiles);
	if( qs.clen != 1 )
		throw new DMLRuntimeException("Multiple quantiles can only be computed on a 1D matrix");
	
	//prepare output (result is most likely dense)
	MatrixBlock out = checkType(ret);
	if( out == null )
		out = new MatrixBlock(qs.rlen, qs.clen, false);
	else
		out.reset(qs.rlen, qs.clen, false);
	
	//pick one value per requested quantile
	for( int i=0; i<qs.rlen; i++ ) {
		double q = qs.quickGetValue(i, 0);
		out.quickSetValue(i, 0, pickValue(q));
	}
	
	return out;
}
/**
 * Computes the weighted median of this sorted (value, weight) matrix:
 * picks the 0.5 quantile, averaging the two middle elements when the
 * total weight is even.
 *
 * @return weighted median value
 * @throws DMLRuntimeException if weights are non-integral
 */
public double median() throws DMLRuntimeException {
	double totalWeight = sumWeightForQuantile();
	boolean evenCount = (totalWeight % 2 == 0);
	return pickValue(0.5, evenCount);
}
/**
 * Picks the value at the given quantile position, without averaging
 * adjacent elements.
 *
 * @param quantile quantile position
 * @return picked value
 * @throws DMLRuntimeException if weights are non-integral
 */
public double pickValue(double quantile) throws DMLRuntimeException{
	final boolean average = false;
	return pickValue(quantile, average);
}
/**
 * Picks the value at the given quantile position from this sorted
 * (value, weight) matrix. If averaging is requested and the total weight
 * is even, the two middle elements are averaged (median semantics).
 *
 * @param quantile quantile position in (0,1]
 * @param average  average the two straddling elements (only honored when
 *                 the total weight is even)
 * @return picked value
 * @throws DMLRuntimeException if weights are non-integral
 */
public double pickValue(double quantile, boolean average)
throws DMLRuntimeException
{
double sum_wt = sumWeightForQuantile();
// do averaging only if it is asked for; and sum_wt is even
average = average && (sum_wt%2 == 0);
//target position in cumulative-weight order
int pos = (int) Math.ceil(quantile*sum_wt);
// NOTE(review): the loop below re-reads row i after incrementing; for
// quantile <= 1 the cumulative weight reaches pos within bounds, but a
// quantile > 1 would read past the last row — confirm callers' range.
int t = 0, i=-1;
do {
i++;
t += quickGetValue(i,1);
} while(t<pos && i < getNumRows());
//System.out.println("values: " + quickGetValue(i,0) + "," + quickGetValue(i,1) + " -- " + quickGetValue(i+1,0) + "," + quickGetValue(i+1,1));
if ( quickGetValue(i,1) != 0 ) {
// i^th value is present in the data set, simply return it
if ( average ) {
if(pos < t) {
//both middle elements fall within row i
return quickGetValue(i,0);
}
if(quickGetValue(i+1,1) != 0)
return (quickGetValue(i,0)+quickGetValue(i+1,0))/2;
else
// (i+1)^th value is 0. So, fetch (i+2)^th value
return (quickGetValue(i,0)+quickGetValue(i+2,0))/2;
}
else
return quickGetValue(i, 0);
}
else {
// i^th value is not present in the data set.
// It can only happen in the case where i^th value is 0.0; and 0.0 is not present in the data set (but introduced by sort).
if ( i+1 < getNumRows() )
// when 0.0 is not the last element in the sorted order
return quickGetValue(i+1,0);
else
// when 0.0 is the last element in the sorted order (input data is all negative)
return quickGetValue(i-1,0);
}
}
/**
 * In a given two-column matrix, the second column denotes weights.
 * Computes the total weight over all rows and validates that the sum
 * is integral (weights are expected to be integer counts).
 *
 * @return sum of all weights
 * @throws DMLRuntimeException if the accumulated weight is non-integral
 */
private double sumWeightForQuantile()
	throws DMLRuntimeException
{
	double total = 0;
	int nrows = getNumRows();
	for( int i=0; i<nrows; i++ )
		total += quickGetValue(i, 1);
	
	//quantile computation requires integer counts
	if( Math.floor(total) < total ) {
		throw new DMLRuntimeException("Unexpected error while computing quantile -- weights must be integers.");
	}
	
	return total;
}
/**
 * Matrix multiplication entry point with block indexes; the indexes are
 * not needed for the block-local computation and this simply delegates
 * to the index-free variant.
 *
 * @param m1Index block index of the left input (unused)
 * @param m1Value left input matrix
 * @param m2Index block index of the right input (unused)
 * @param m2Value right input matrix
 * @param result  output block (reused if non-null)
 * @param op      aggregate binary operator (must be multiply/plus)
 * @return result of the matrix multiplication
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException if dimensions or operator are invalid
 */
public MatrixValue aggregateBinaryOperations(MatrixIndexes m1Index, MatrixValue m1Value, MatrixIndexes m2Index, MatrixValue m2Value,
MatrixValue result, AggregateBinaryOperator op )
throws DMLUnsupportedOperationException, DMLRuntimeException
{
return aggregateBinaryOperations(m1Value, m2Value, result, op);
}
/**
 * Computes the matrix multiplication m1 %*% m2, the only supported binary
 * aggregate operation (binaryFn=multiply, aggOp=plus). Output sparsity is
 * estimated upfront; multi-threaded execution is used if requested by op.
 *
 * @param m1Value left input matrix
 * @param m2Value right input matrix
 * @param result  output block (reused if non-null)
 * @param op      aggregate binary operator (must be multiply/plus)
 * @return result of the matrix multiplication
 * @throws DMLRuntimeException if dimensions mismatch or the operator is
 *         not multiply/plus
 */
public MatrixValue aggregateBinaryOperations(MatrixValue m1Value, MatrixValue m2Value, MatrixValue result, AggregateBinaryOperator op)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	//check input types, dimensions, configuration
	MatrixBlock m1 = checkType(m1Value);
	MatrixBlock m2 = checkType(m2Value);
	MatrixBlock ret = checkType(result);
	if( m1.clen != m2.rlen ) {
		//fixed: throw DMLRuntimeException (was raw RuntimeException) for
		//consistency with the declared throws clause and sibling checks
		throw new DMLRuntimeException("Dimensions do not match for matrix multiplication ("+m1.clen+"!="+m2.rlen+").");
	}
	if( !(op.binaryFn instanceof Multiply && op.aggOp.increOp.fn instanceof Plus) ) {
		throw new DMLRuntimeException("Unsupported binary aggregate operation: ("+op.binaryFn+", "+op.aggOp+").");
	}
	
	//setup meta data (dimensions, sparsity)
	int rl = m1.rlen;
	int cl = m2.clen;
	SparsityEstimate sp = estimateSparsityOnAggBinary(m1, m2, op);
	
	//create output matrix block
	if( ret==null )
		ret = new MatrixBlock(rl, cl, sp.sparse, sp.estimatedNonZeros);
	else
		ret.reset(rl, cl, sp.sparse, sp.estimatedNonZeros);
	
	//compute matrix multiplication (only supported binary aggregate operation)
	if( op.getNumThreads() > 1 )
		LibMatrixMult.matrixMult(m1, m2, ret, op.getNumThreads());
	else
		LibMatrixMult.matrixMult(m1, m2, ret);
	
	return ret;
}
/**
 * Computes sum(m1 * m2 * m3) over three column vectors of equal length,
 * using Kahan-compensated addition for numerical stability. Returns 0
 * immediately if any input is an empty block (product would be all-zero).
 *
 * @param m1 first column vector
 * @param m2 second column vector
 * @param m3 third column vector
 * @param op aggregate binary operator (must be kahan-plus/multiply)
 * @return scalar sum of the element-wise triple product
 * @throws DMLRuntimeException if dimensions or operator are invalid
 */
public ScalarObject aggregateTernaryOperations(MatrixBlock m1, MatrixBlock m2, MatrixBlock m3, AggregateBinaryOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//check input dimensions and operators
if( m1.rlen!=m2.rlen || m2.rlen!=m3.rlen || m1.clen!=1 || m2.clen!=1 || m3.clen!=1 )
throw new DMLRuntimeException("Invalid dimensions for aggregate tertiary ("+m1.rlen+"x"+m1.clen+", "+m2.rlen+"x"+m2.clen+", "+m3.rlen+"x"+m3.clen+").");
if( !( op.aggOp.increOp.fn instanceof KahanPlus && op.binaryFn instanceof Multiply) )
throw new DMLRuntimeException("Unsupported operator for aggregate tertiary operations.");
//early abort if any block is empty
if( m1.isEmptyBlock(false) || m2.isEmptyBlock(false) || m3.isEmptyBlock(false) )
return new DoubleObject(0);
//setup meta data (dimensions, sparsity)
int rlen = m1.rlen;
//compute block operations
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
if( !m1.sparse && !m2.sparse && !m3.sparse ) //DENSE
{
//direct array access on the dense blocks (fast path)
double[] a = m1.denseBlock;
double[] b = m2.denseBlock;
double[] c = m3.denseBlock;
for( int i=0; i<rlen; i++ ) {
double val = a[i] * b[i] * c[i];
kplus.execute2( kbuff, val );
}
}
else //GENERAL CASE
{
//format-agnostic access via quickGetValue
for( int i=0; i<rlen; i++ ) {
double val1 = m1.quickGetValue(i, 0);
double val2 = m2.quickGetValue(i, 0);
double val3 = m3.quickGetValue(i, 0);
double val = val1 * val2 * val3;
kplus.execute2( kbuff, val );
}
}
//create output
DoubleObject ret = new DoubleObject(kbuff._sum);
return ret;
}
/**
 * Invocation from CP instructions. The aggregate is computed on the groups object
 * against target and weights.
 *
 * Notes:
 * * The computed number of groups is reused for multiple invocations with different target.
 * * This implementation supports that the target is passed as column or row vector,
 * in case of row vectors we also use sparse-safe implementations for sparse safe
 * aggregation operators.
 *
 * @param tgt     target values to aggregate (column or row vector)
 * @param wghts   optional weights (column vector, may be null)
 * @param ret     output block (reused if non-null)
 * @param ngroups number of groups if known from the instruction, else <= 0
 * @param op      CMOperator (count/mean/variance) or AggregateOperator (sum)
 * @return per-group aggregates as an (numGroups x 1) matrix
 * @throws DMLRuntimeException if dimensions or operator are invalid
 * @throws DMLUnsupportedOperationException
 */
public MatrixValue groupedAggOperations(MatrixValue tgt, MatrixValue wghts, MatrixValue ret, int ngroups, Operator op)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
//setup input matrices
// this <- groups
MatrixBlock target = checkType(tgt);
MatrixBlock weights = checkType(wghts);
//check valid dimensions
if( this.getNumColumns() != 1 || (weights!=null && weights.getNumColumns()!=1) )
throw new DMLRuntimeException("groupedAggregate can only operate on 1-dimensional column matrices for groups and weights.");
if( target.getNumColumns() != 1 && op instanceof CMOperator )
throw new DMLRuntimeException("groupedAggregate can only operate on 1-dimensional column matrices for target (for this aggregation function).");
if( target.getNumColumns() != 1 && target.getNumRows()!=1 )
throw new DMLRuntimeException("groupedAggregate can only operate on 1-dimensional column or row matrix for target.");
if( this.getNumRows() != Math.max(target.getNumRows(),target.getNumColumns()) || (weights != null && this.getNumRows() != weights.getNumRows()) )
throw new DMLRuntimeException("groupedAggregate can only operate on matrices with equal dimensions.");
// obtain numGroups from instruction, if provided
if (ngroups > 0)
numGroups = ngroups;
// Determine the number of groups
if( numGroups <= 0 ) //reuse if available
{
double min = this.min();
double max = this.max();
if ( min <= 0 )
throw new DMLRuntimeException("Invalid value (" + min + ") encountered in 'groups' while computing groupedAggregate");
if ( max <= 0 )
throw new DMLRuntimeException("Invalid value (" + max + ") encountered in 'groups' while computing groupedAggregate.");
numGroups = (int) max;
}
// Allocate result matrix
MatrixBlock result = checkType(ret);
boolean result_sparsity = estimateSparsityOnGroupedAgg(rlen, numGroups);
if(result==null)
result=new MatrixBlock(numGroups, 1, result_sparsity);
else
result.reset(numGroups, 1, result_sparsity);
// Compute the result
double w = 1; // default weight
//CM operator for count, mean, variance
//note: current support only for column vectors
if(op instanceof CMOperator) {
// initialize required objects for storing the result of CM operations
CM cmFn = CM.getCMFnObject(((CMOperator) op).getAggOpType());
CM_COV_Object[] cmValues = new CM_COV_Object[numGroups];
for ( int i=0; i < numGroups; i++ )
cmValues[i] = new CM_COV_Object();
for ( int i=0; i < this.getNumRows(); i++ ) {
int g = (int) this.quickGetValue(i, 0);
// NOTE(review): only g > numGroups is skipped; a group id < 1 would
// index cmValues out of bounds when ngroups is supplied and bypasses
// the min() check above — confirm group ids are validated upstream.
if ( g > numGroups )
continue;
double d = target.quickGetValue(i,0);
if ( weights != null )
w = weights.quickGetValue(i,0);
// cmValues is 0-indexed, whereas range of values for g = [1,numGroups]
cmFn.execute(cmValues[g-1], d, w);
}
// extract the required value from each CM_COV_Object
for ( int i=0; i < numGroups; i++ )
// result is 0-indexed, so is cmValues
result.quickSetValue(i, 0, cmValues[i].getRequiredResult(op));
}
//Aggregate operator for sum (via kahan sum)
//note: support for row/column vectors and dense/sparse
else if( op instanceof AggregateOperator )
{
//the only aggregate operator that is supported here is sum,
//furthermore, we always use KahanPlus and hence aggop.correctionExists is true
AggregateOperator aggop = (AggregateOperator) op;
//default case for aggregate(sum)
groupedAggregateKahanPlus(target, weights, result, aggop);
}
else
throw new DMLRuntimeException("Invalid operator (" + op + ") encountered while processing groupedAggregate.");
return result;
}
/**
 * This is a specific implementation for aggregate(fn="sum"), where we use KahanPlus for numerical
 * stability. In contrast to other functions of aggregate, this implementation supports row and column
 * vectors for target and exploits sparse representations since KahanPlus is sparse-safe.
 *
 * @param target  values to aggregate (row or column vector)
 * @param weights optional weights (may be null; default weight 1)
 * @param result  output (numGroups x 1) block, written in place
 * @param aggop   aggregate operator (KahanPlus-based sum)
 * @throws DMLRuntimeException
 */
private void groupedAggregateKahanPlus( MatrixBlock target, MatrixBlock weights, MatrixBlock result, AggregateOperator aggop ) throws DMLRuntimeException
{
boolean rowVector = target.getNumColumns()>1;
double w = 1; //default weight
//skip empty blocks (sparse-safe operation)
if( target.isEmptyBlock(false) )
return;
//init group buffers (one Kahan accumulator per group)
KahanObject[] buffer = new KahanObject[numGroups];
for(int i=0; i < numGroups; i++ )
buffer[i] = new KahanObject(aggop.initialValue, 0);
if( rowVector ) //target is rowvector
{
if( target.sparse ) //SPARSE target
{
if( target.sparseRows[0]!=null )
{
//iterate over nnz of the single sparse row only (sparse-safe)
int len = target.sparseRows[0].size();
int[] aix = target.sparseRows[0].getIndexContainer();
double[] avals = target.sparseRows[0].getValueContainer();
for( int j=0; j<len; j++ ) //for each nnz
{
int g = (int) this.quickGetValue(aix[j], 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(aix[j],0);
aggop.increOp.fn.execute(buffer[g-1], avals[j]*w);
}
}
}
else //DENSE target
{
for ( int i=0; i < target.getNumColumns(); i++ ) {
double d = target.denseBlock[ i ];
if( d != 0 ) //sparse-safe
{
int g = (int) this.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
aggop.increOp.fn.execute(buffer[g-1], d*w);
}
}
}
}
else //column vector (always dense, but works for sparse as well)
{
for ( int i=0; i < this.getNumRows(); i++ )
{
double d = target.quickGetValue(i,0);
if( d != 0 ) //sparse-safe
{
int g = (int) this.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
aggop.increOp.fn.execute(buffer[g-1], d*w);
}
}
}
// extract the results from group buffers
for ( int i=0; i < numGroups; i++ )
result.quickSetValue(i, 0, buffer[i]._sum);
}
/**
 * Removes all-empty rows or columns from this matrix. For an entirely
 * empty input the result is an empty 1-by-n (rows) or m-by-1 (cols)
 * matrix, because matrices with a zero dimension are not allowed.
 *
 * @param ret  output block, written in place
 * @param rows true to remove empty rows, false to remove empty columns
 * @return the compacted matrix
 */
public MatrixValue removeEmptyOperations( MatrixValue ret, boolean rows )
	throws DMLRuntimeException, DMLUnsupportedOperationException
{
	//check for empty inputs: keep one (empty) row/column since
	//zero-dimension matrices are not permitted
	if( nonZeros == 0 ) {
		int rnum = rows ? 1 : rlen;
		int cnum = rows ? clen : 1;
		ret.reset(rnum, cnum, false);
		return ret;
	}
	
	MatrixBlock result = checkType(ret);
	return rows ? removeEmptyRows(result) : removeEmptyColumns(result);
}
/**
 * Removes all-empty rows: scans the block for non-empty rows, then copies
 * only those into the result. At least one row is always kept, since
 * zero-dimension matrices are not allowed.
 *
 * @param ret output block, written in place
 * @return compacted matrix with empty rows removed
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
private MatrixBlock removeEmptyRows(MatrixBlock ret)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
final int m = rlen;
final int n = clen;
//Step 1: scan block and determine non-empty rows
boolean[] flags = new boolean[ m ]; //false
int rlen2 = 0; //number of non-empty rows
if( sparse ) //SPARSE
{
SparseRow[] a = sparseRows;
for ( int i=0; i < m; i++ )
if ( a[i] != null && !a[i].isEmpty() ) {
flags[i] = true;
rlen2++;
}
}
else //DENSE
{
double[] a = denseBlock;
for(int i=0, aix=0; i<m; i++, aix+=n) {
for(int j=0; j<n; j++)
if( a[aix+j] != 0 )
{
flags[i] = true;
rlen2++;
//early abort for current row
break;
}
}
}
//Step 2: reset result and copy rows
//dense stays dense, sparse might be dense/sparse
rlen2 = Math.max(rlen2, 1); //ensure valid output
boolean sp = evalSparseFormatInMemory(rlen2, n, nonZeros);
ret.reset(rlen2, n, sp);
if( sparse ) //SPARSE
{
//note: output dense or sparse
for( int i=0, cix=0; i<m; i++ )
if( flags[i] )
ret.appendRow(cix++, sparseRows[i]);
}
else //DENSE
{
ret.allocateDenseBlock();
double[] a = denseBlock;
double[] c = ret.denseBlock;
for( int i=0, aix=0, cix=0; i<m; i++, aix+=n )
if( flags[i] )
{
//copy the entire surviving row at once
System.arraycopy(a, aix, c, cix, n);
cix += n; //target index
}
}
//check sparsity (nnz unchanged: removed rows contained no nonzeros)
ret.nonZeros = this.nonZeros;
ret.examSparsity();
return ret;
}
/**
 * Removes all-empty columns: scans the block for non-empty columns,
 * builds an old-to-new column index mapping, then copies surviving
 * columns into the result. At least one column is always kept.
 *
 * @param ret output block, written in place
 * @return compacted matrix with empty columns removed
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
private MatrixBlock removeEmptyColumns(MatrixBlock ret)
throws DMLRuntimeException, DMLUnsupportedOperationException
{
final int m = rlen;
final int n = clen;
//Step 1: scan block and determine non-empty columns
//(we optimized for cache-friendly behavior and hence don't do early abort)
boolean[] flags = new boolean[ n ]; //false
if( sparse ) //SPARSE
{
SparseRow[] a = sparseRows;
for( int i=0; i<m; i++ )
if ( a[i] != null && !a[i].isEmpty() ) {
int alen = a[i].size();
int[] aix = a[i].getIndexContainer();
for( int j=0; j<alen; j++ )
flags[ aix[j] ] = true;
}
}
else //DENSE
{
double[] a = denseBlock;
for(int i=0, aix=0; i<m; i++)
for(int j=0; j<n; j++, aix++)
if( a[aix] != 0 )
flags[j] = true;
}
//Step 2: determine number of columns
int clen2 = 0;
for( int j=0; j<n; j++ )
clen2 += flags[j] ? 1 : 0;
//Step 3: create mapping of flags to target indexes
int[] cix = new int[n];
for( int j=0, pos=0; j<n; j++ )
if( flags[j] )
cix[j] = pos++;
//Step 4: reset result and copy cols
//dense stays dense, sparse might be dense/sparse
clen2 = Math.max(clen2, 1); //ensure valid output
boolean sp = evalSparseFormatInMemory(m, clen2, nonZeros);
ret.reset(m, clen2, sp);
if( sparse ) //SPARSE
{
//note: output dense or sparse
SparseRow[] a = sparseRows;
for( int i=0; i<m; i++ )
if ( a[i] != null && !a[i].isEmpty() ) {
int alen = a[i].size();
int[] aix = a[i].getIndexContainer();
double[] avals = a[i].getValueContainer();
for( int j=0; j<alen; j++ )
ret.appendValue(i, cix[aix[j]], avals[j]);
}
}
else //DENSE
{
ret.allocateDenseBlock();
double[] a = denseBlock;
double[] c = ret.denseBlock;
for(int i=0, aix=0, lcix=0; i<m; i++, lcix+=clen2)
for(int j=0; j<n; j++, aix++)
if( a[aix] != 0 )
c[ lcix+cix[j] ] = a[aix];
}
//check sparsity (nnz unchanged: removed columns contained no nonzeros)
ret.nonZeros = this.nonZeros;
ret.examSparsity();
return ret;
}
/**
 * Replaces every occurrence of pattern with replacement (NaN patterns are
 * matched via Double.isNaN). Replacing a nonzero pattern is sparse-safe;
 * replacing 0 forces a dense output since all implicit zeros change.
 *
 * @param result      output block, written in place
 * @param pattern     value to search for (may be NaN)
 * @param replacement value to substitute
 * @return matrix with all pattern occurrences replaced
 */
@Override
public MatrixValue replaceOperations(MatrixValue result, double pattern, double replacement)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
MatrixBlock ret = checkType(result);
examSparsity(); //ensure its in the right format
ret.reset(rlen, clen, sparse);
if( nonZeros == 0 && pattern != 0 )
return ret; //early abort
//NaN != NaN, so NaN patterns need an explicit isNaN check
boolean NaNpattern = Double.isNaN(pattern);
if( sparse ) //SPARSE
{
if( pattern != 0d ) //SPARSE <- SPARSE (sparse-safe)
{
ret.allocateSparseRowsBlock();
SparseRow[] a = sparseRows;
SparseRow[] c = ret.sparseRows;
for( int i=0; i<rlen; i++ )
{
SparseRow arow = a[ i ];
if( arow!=null && !arow.isEmpty() )
{
SparseRow crow = new SparseRow(arow.size());
int alen = arow.size();
int[] aix = arow.getIndexContainer();
double[] avals = arow.getValueContainer();
for( int j=0; j<alen; j++ )
{
double val = avals[j];
if( val== pattern || (NaNpattern && Double.isNaN(val)) )
crow.append(aix[j], replacement);
else
crow.append(aix[j], val);
}
c[ i ] = crow;
}
}
}
else //DENSE <- SPARSE
{
//pattern 0 replaces all implicit zeros, so the result is dense
ret.sparse = false;
ret.allocateDenseBlock();
SparseRow[] a = sparseRows;
double[] c = ret.denseBlock;
//initialize with replacement (since all 0 values, see SPARSITY_TURN_POINT)
Arrays.fill(c, replacement);
//overwrite with existing values (via scatter)
if( a != null ) //check for empty matrix
for( int i=0, cix=0; i<rlen; i++, cix+=clen )
{
SparseRow arow = a[ i ];
if( arow!=null && !arow.isEmpty() )
{
int alen = arow.size();
int[] aix = arow.getIndexContainer();
double[] avals = arow.getValueContainer();
for( int j=0; j<alen; j++ )
if( avals[ j ] != 0 )
c[ cix+aix[j] ] = avals[ j ];
}
}
}
}
else //DENSE <- DENSE
{
int mn = ret.rlen * ret.clen;
ret.allocateDenseBlock();
double[] a = denseBlock;
double[] c = ret.denseBlock;
for( int i=0; i<mn; i++ )
{
double val = a[i];
if( val== pattern || (NaNpattern && Double.isNaN(val)) )
c[i] = replacement;
else
c[i] = a[i];
}
}
ret.recomputeNonZeros();
ret.examSparsity();
return ret;
}
/**
 * D = ctable(A,v2,W)
 * this <- A; scalarThat <- v2; that2 <- W; result <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (v2) from scalar_input2 (scalarThat)
 * (i3,j3,w) from input3 (that2)
 *
 * Results are written into resultMap if resultBlock is null, otherwise
 * into resultBlock (with nnz maintenance afterwards).
 *
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
@Override
public void ternaryOperations(Operator op, double scalarThat,
MatrixValue that2Val, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
MatrixBlock that2 = checkType(that2Val);
CTable ctable = CTable.getCTableFnObject();
double v2 = scalarThat;
//sparse-unsafe ctable execution
//(because input values of 0 are invalid and have to result in errors)
if ( resultBlock == null ) {
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
double w = that2.quickGetValue(i, j);
ctable.execute(v1, v2, w, false, resultMap);
}
}
else {
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
double w = that2.quickGetValue(i, j);
ctable.execute(v1, v2, w, false, resultBlock);
}
resultBlock.recomputeNonZeros();
}
}
/**
 * D = ctable(A,v2,w)
 * this <- A; scalar_that <- v2; scalar_that2 <- w; result <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (v2) from scalar_input2 (scalarThat)
 * (w) from scalar_input3 (scalarThat2)
 *
 * Results go into resultMap if resultBlock is null, otherwise into
 * resultBlock (with nnz maintenance afterwards).
 */
@Override
public void ternaryOperations(Operator op, double scalarThat,
	double scalarThat2, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	CTable ctable = CTable.getCTableFnObject();
	double v2 = scalarThat;
	double w = scalarThat2;
	
	//sparse-unsafe ctable execution
	//(because input values of 0 are invalid and have to result in errors)
	boolean toMap = (resultBlock == null);
	for( int i=0; i<rlen; i++ )
		for( int j=0; j<clen; j++ ) {
			double v1 = quickGetValue(i, j);
			if( toMap )
				ctable.execute(v1, v2, w, false, resultMap);
			else
				ctable.execute(v1, v2, w, false, resultBlock);
		}
	
	if( !toMap )
		resultBlock.recomputeNonZeros();
}
/**
 * Specific ctable case of ctable(seq(...),X), where X is the only
 * matrix input. The 'left' input parameter specifies if the seq appeared
 * on the left, otherwise it appeared on the right.
 *
 * The sequence value for row i is offset+i+1, where offset accounts for
 * the block's row position (ix1) within the overall matrix.
 */
@Override
public void ternaryOperations(Operator op, MatrixIndexes ix1, double scalarThat,
boolean left, int brlen, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
CTable ctable = CTable.getCTableFnObject();
double w = scalarThat;
//global row offset of this block (1-based block indexes)
int offset = (int) ((ix1.getRowIndex()-1)*brlen);
//sparse-unsafe ctable execution
//(because input values of 0 are invalid and have to result in errors)
if( resultBlock == null) {
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
if( left )
ctable.execute(offset+i+1, v1, w, false, resultMap);
else
ctable.execute(v1, offset+i+1, w, false, resultMap);
}
}
else {
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
if( left )
ctable.execute(offset+i+1, v1, w, false, resultBlock);
else
ctable.execute(v1, offset+i+1, w, false, resultBlock);
}
resultBlock.recomputeNonZeros();
}
}
/**
 * D = ctable(A,B,w)
 * this <- A; that <- B; scalar_that2 <- w; result <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (i1,j1,v2) from input2 (that)
 * (w) from scalar_input3 (scalarThat2)
 *
 * NOTE: This method supports both vectors and matrices. In case of matrices and ignoreZeros=true
 * we can also use a sparse-safe implementation
 */
@Override
public void ternaryOperations(Operator op, MatrixValue thatVal, double scalarThat2, boolean ignoreZeros,
HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//setup ctable computation
MatrixBlock that = checkType(thatVal);
CTable ctable = CTable.getCTableFnObject();
double w = scalarThat2;
if( ignoreZeros //SPARSE-SAFE & SPARSE INPUTS
&& this.sparse && that.sparse )
{
//note: only used if both inputs have aligned zeros, which
//allows us to infer that the nnz both inputs are equivalent
//NOTE(review): this path dereferences brow without a null check and
//pairs avals[j] with bvals[j] positionally — relies entirely on the
//aligned-zeros precondition above; confirm callers guarantee it.
//early abort on empty blocks possible
if( this.isEmptyBlock(false) && that.isEmptyBlock(false) )
return;
SparseRow[] a = this.sparseRows;
SparseRow[] b = that.sparseRows;
for( int i=0; i<rlen; i++ )
{
SparseRow arow = a[i];
SparseRow brow = b[i];
if( arow != null && !arow.isEmpty() )
{
int alen = arow.size();
double[] avals = arow.getValueContainer();
double[] bvals = brow.getValueContainer();
if( resultBlock == null ) {
for( int j=0; j<alen; j++ )
ctable.execute(avals[j], bvals[j], w, ignoreZeros, resultMap);
}
else {
for( int j=0; j<alen; j++ )
ctable.execute(avals[j], bvals[j], w, ignoreZeros, resultBlock);
}
}
}
}
else //SPARSE-UNSAFE | GENERIC INPUTS
{
//sparse-unsafe ctable execution
//(because input values of 0 are invalid and have to result in errors)
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
double v2 = that.quickGetValue(i, j);
if( resultBlock == null )
ctable.execute(v1, v2, w, ignoreZeros, resultMap);
else
ctable.execute(v1, v2, w, ignoreZeros, resultBlock);
}
}
//maintain nnz (if necessary)
if( resultBlock!=null )
resultBlock.recomputeNonZeros();
}
/**
 * D = ctable(seq,A,w)
 * this <- seq; thatMatrix <- A; thatScalar <- w; result <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (i1,j1,v2) from input2 (that)
 * (w) from scalar_input3 (scalarThat2)
 *
 * Expand variant: each input row produces exactly one output cell; the
 * result's column count is only known after the scan and set at the end.
 */
public void ternaryOperations(Operator op, MatrixValue thatMatrix, double thatScalar, MatrixBlock resultBlock)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
MatrixBlock that = checkType(thatMatrix);
CTable ctable = CTable.getCTableFnObject();
double w = thatScalar;
//sparse-unsafe ctable execution
//(because input values of 0 are invalid and have to result in errors)
//resultBlock guaranteed to be allocated for ctableexpand
//each row in resultBlock will be allocated and will contain exactly one value
int maxCol = 0;
for( int i=0; i<rlen; i++ ) {
double v2 = that.quickGetValue(i, 0);
//row index is the sequence value i+1; track the max column written
maxCol = ctable.execute(i+1, v2, w, maxCol, resultBlock);
}
//update meta data (initially unknown number of columns)
//note: nnz maintained in ctable (via quickset)
resultBlock.clen = maxCol;
}
/**
 * D = ctable(A,B,W)
 * this <- A; that <- B; that2 <- W; result <- D
 *
 * (i1,j1,v1) from input1 (this)
 * (i1,j1,v2) from input2 (that)
 * (i1,j1,w) from input3 (that2)
 *
 * Convenience overload: delegates with a null resultBlock, so results
 * are accumulated into the given ctableResult map.
 */
public void ternaryOperations(Operator op, MatrixValue thatVal, MatrixValue that2Val, HashMap<MatrixIndexes, Double> ctableResult)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
ternaryOperations(op, thatVal, that2Val, ctableResult, null);
}
/**
 * D = ctable(A,B,W) with three aligned matrix inputs: values from this
 * and that form the output coordinates, that2 supplies the weights.
 * Results go into resultMap if resultBlock is null, otherwise into
 * resultBlock (with nnz maintenance afterwards).
 */
@Override
public void ternaryOperations(Operator op, MatrixValue thatVal, MatrixValue that2Val, HashMap<MatrixIndexes, Double> resultMap, MatrixBlock resultBlock)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
MatrixBlock that = checkType(thatVal);
MatrixBlock that2 = checkType(that2Val);
CTable ctable = CTable.getCTableFnObject();
//sparse-unsafe ctable execution
//(because input values of 0 are invalid and have to result in errors)
if(resultBlock == null)
{
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
double v2 = that.quickGetValue(i, j);
double w = that2.quickGetValue(i, j);
ctable.execute(v1, v2, w, false, resultMap);
}
}
else
{
for( int i=0; i<rlen; i++ )
for( int j=0; j<clen; j++ )
{
double v1 = this.quickGetValue(i, j);
double v2 = that.quickGetValue(i, j);
double w = that2.quickGetValue(i, j);
ctable.execute(v1, v2, w, false, resultBlock);
}
resultBlock.recomputeNonZeros();
}
}
/**
 * Computes the weighted squared loss wsloss(X,U,V,W) via
 * LibMatrixMult.matrixMultWSLoss, with this block as X. The weight
 * matrix W is only passed through when the weights type requires it.
 *
 * @param op  operator (unused here; dispatch already decided)
 * @param um  factor matrix U (rows must match X's rows)
 * @param vm  factor matrix V (rows must match X's columns)
 * @param wm  weight matrix W (ignored if wt == WeightsType.NONE)
 * @param out output block, reset to 1x1
 * @param wt  weights type
 * @return 1x1 matrix holding the loss value
 * @throws DMLRuntimeException on dimension mismatch
 */
@Override
public MatrixValue quaternaryOperations(Operator op, MatrixValue um, MatrixValue vm, MatrixValue wm, MatrixValue out, WeightsType wt)
	throws DMLUnsupportedOperationException, DMLRuntimeException
{
	//check input dimensions
	if( getNumRows() != um.getNumRows() )
		throw new DMLRuntimeException("Dimension mismatch rows on wsloss: "+getNumRows()+"!="+um.getNumRows());
	if( getNumColumns() != vm.getNumRows() )
		//fixed: report the actual column count (message previously printed getNumRows())
		throw new DMLRuntimeException("Dimension mismatch columns on wsloss: "+getNumColumns()+"!="+vm.getNumRows());
	
	//check input data types
	MatrixBlock X = this;
	MatrixBlock W = (wt!=WeightsType.NONE)?checkType(wm):null;
	MatrixBlock U = checkType(um);
	MatrixBlock V = checkType(vm);
	MatrixBlock R = checkType(out);
	
	//prepare intermediates and output
	R.reset(1, 1, false);
	
	//core block computation
	LibMatrixMult.matrixMultWSLoss(X, U, V, W, R, wt);
	
	return R;
}
////////
// Data Generation Methods
// (rand, sequence)
/**
 * Function to generate the random matrix with specified dimensions (block sizes are not specified).
 * The block size is taken from the runtime configuration, falling back to
 * the translator default when no configuration is available.
 *
 * @param rows     number of rows
 * @param cols     number of columns
 * @param sparsity fraction of nonzero cells
 * @param min      minimum cell value
 * @param max      maximum cell value
 * @param pdf      probability density function ("uniform" or "normal")
 * @param seed     random seed
 * @return generated random matrix
 * @throws DMLRuntimeException
 */
public static MatrixBlock randOperations(int rows, int cols, double sparsity, double min, double max, String pdf, long seed)
throws DMLRuntimeException
{
DMLConfig conf = ConfigurationManager.getConfig();
int blocksize = (conf!=null) ? ConfigurationManager.getConfig().getIntValue(DMLConfig.DEFAULT_BLOCK_SIZE)
: DMLTranslator.DMLBlockSize;
return randOperations(
rows, cols, blocksize, blocksize,
sparsity, min, max, pdf, seed);
}
/**
 * Function to generate the random matrix with specified dimensions and block dimensions.
 * Sets up per-block seeds and nnz counts unless the (min,max,sparsity,pdf)
 * combination allows a shortcut generation path.
 *
 * @param rows        number of rows
 * @param cols        number of columns
 * @param rowsInBlock rows per block
 * @param colsInBlock columns per block
 * @param sparsity    fraction of nonzero cells
 * @param min         minimum cell value (ignored for pdf="normal")
 * @param max         maximum cell value (ignored for pdf="normal")
 * @param pdf         probability density function ("uniform" or "normal")
 * @param seed        random seed
 * @return generated random matrix
 * @throws DMLRuntimeException
 */
public static MatrixBlock randOperations(int rows, int cols, int rowsInBlock, int colsInBlock, double sparsity, double min, double max, String pdf, long seed)
throws DMLRuntimeException
{
MatrixBlock out = new MatrixBlock();
Well1024a bigrand = null;
long[] nnzInBlock = null;
//setup seeds and nnz per block
if( !LibMatrixDatagen.isShortcutRandOperation(min, max, sparsity, pdf) ){
bigrand = LibMatrixDatagen.setupSeedsForRand(seed);
nnzInBlock = LibMatrixDatagen.computeNNZperBlock(rows, cols, rowsInBlock, colsInBlock, sparsity);
}
//generate rand data
if ( pdf.equalsIgnoreCase(LibMatrixDatagen.RAND_PDF_NORMAL) ) {
// for normally distributed values, min and max are specified as an invalid value NaN.
out.randOperationsInPlace(pdf, rows, cols, rowsInBlock, colsInBlock, nnzInBlock, sparsity, Double.NaN, Double.NaN, bigrand, -1);
}
else {
out.randOperationsInPlace(pdf, rows, cols, rowsInBlock, colsInBlock, nnzInBlock, sparsity, min, max, bigrand, -1);
}
return out;
}
/**
 * Fills this matrix block with random values. Invoked both from CP and
 * from MR: in CP an entire matrix is generated block-by-block and a
 * <code>bigrand</code> is passed so that block-level seeds are generated
 * internally; in MR a single block is generated for the given block-level
 * seed <code>bSeed</code>.
 *
 * When pdf="uniform", cell values are drawn from a uniform distribution
 * in range <code>[min,max]</code>.
 *
 * When pdf="normal", cell values are drawn from the standard normal
 * distribution N(0,1); the range of generated values is (-Inf,+Inf)
 * (callers pass NaN for min/max in this case).
 *
 * @param pdf probability density function ("uniform" or "normal")
 * @param rows number of rows
 * @param cols number of columns
 * @param rowsInBlock block height
 * @param colsInBlock block width
 * @param nnzInBlock non-zeros per block (null for shortcut rand operations)
 * @param sparsity fraction of non-zero cells
 * @param min minimum cell value
 * @param max maximum cell value
 * @param bigrand random generator for block-level seeds (CP case)
 * @param bSeed block-level seed (MR case)
 * @return this matrix block, filled with random data
 * @throws DMLRuntimeException if data generation fails
 */
public MatrixBlock randOperationsInPlace(String pdf, int rows, int cols, int rowsInBlock, int colsInBlock, long[] nnzInBlock, double sparsity, double min, double max, Well1024a bigrand, long bSeed)
throws DMLRuntimeException
{
LibMatrixDatagen.generateRandomMatrix( this, pdf, rows, cols, rowsInBlock, colsInBlock,
nnzInBlock, sparsity, min, max, bigrand, bSeed );
return this;
}
/**
 * Generates a sequence matrix (always in dense format).
 *
 * Both end points <code>from</code> and <code>to</code> are meant to be
 * included, but <code>to</code> is only part of the result when
 * (to-from) is perfectly divisible by <code>incr</code>. For example,
 * seq(0,1,0.5) yields (0.0 0.5 1.0) whereas seq(0,1,0.6) yields
 * (0.0 0.6) but not (0.0 0.6 1.0).
 *
 * @param from first value of the sequence (inclusive)
 * @param to upper bound of the sequence (see divisibility note above)
 * @param incr increment between consecutive values
 * @return a new matrix block holding the sequence
 * @throws DMLRuntimeException if sequence generation fails
 */
public static MatrixBlock seqOperations(double from, double to, double incr)
    throws DMLRuntimeException
{
    MatrixBlock seq = new MatrixBlock();
    LibMatrixDatagen.generateSequence(seq, from, to, incr);
    return seq;
}
/**
 * Fills this matrix block with the sequence [from, to] with step incr.
 * See {@link #seqOperations(double, double, double)} for the endpoint
 * inclusion semantics.
 *
 * @param from first value of the sequence (inclusive)
 * @param to upper bound (included only if (to-from) is divisible by incr)
 * @param incr increment between consecutive values
 * @return this matrix block, filled with the sequence
 * @throws DMLRuntimeException if sequence generation fails
 */
public MatrixBlock seqOperationsInPlace(double from, double to, double incr)
throws DMLRuntimeException
{
LibMatrixDatagen.generateSequence( this, from, to, incr );
return this;
}
////////
// Misc methods
/**
 * Casts the given matrix value to a MatrixBlock, failing when a foreign
 * MatrixValue implementation is passed in. A null input is passed through
 * unchanged.
 */
private static MatrixBlock checkType(MatrixValue block) throws DMLUnsupportedOperationException
{
    if( block == null || block instanceof MatrixBlock )
        return (MatrixBlock) block;
    throw new DMLUnsupportedOperationException("the Matrix Value is not MatrixBlockDSM!");
}
/**
 * Debug helper: prints the representation flag, the non-zero count (dense
 * case only), and all cell values (tab-separated, one row per line) to
 * stdout.
 */
public void print()
{
    System.out.println("sparse = " + sparse);
    if( !sparse )
        System.out.println("nonzeros = " + nonZeros);

    for( int r = 0; r < rlen; r++ ) {
        StringBuilder row = new StringBuilder();
        for( int c = 0; c < clen; c++ )
            row.append(quickGetValue(r, c)).append("\t");
        System.out.println(row.toString());
    }
}
// Matrix blocks have no natural ordering; fail fast (e.g. if one ends up
// in a sorted collection) instead of returning an arbitrary result.
@Override
public int compareTo(Object arg0) {
throw new RuntimeException("CompareTo should never be called for matrix blocks.");
}
// Value-based comparison of matrix blocks is intentionally unsupported;
// fail fast to surface accidental use.
@Override
public boolean equals(Object arg0) {
throw new RuntimeException("Equals should never be called for matrix blocks.");
}
// Consistent with equals(): hashing matrix blocks is intentionally
// unsupported (e.g. as HashMap keys); fail fast to surface accidental use.
@Override
public int hashCode() {
throw new RuntimeException("HashCode should never be called for matrix blocks.");
}
/**
 * Returns a human-readable dump of this block: the representation flag,
 * the non-zero count, the dimensions, and the full cell contents
 * (sparse rows or tab-separated dense rows).
 */
@Override
public String toString()
{
    StringBuilder buf = new StringBuilder();
    buf.append("sparse? = ").append(sparse).append("\n");
    buf.append("nonzeros = ").append(nonZeros).append("\n");
    buf.append("size: ").append(rlen).append(" X ").append(clen).append("\n");

    if( sparse )
    {
        // rows beyond the allocated sparse-row array are reported as null
        int allocated = (sparseRows != null) ? Math.min(rlen, sparseRows.length) : 0;
        for( int r = 0; r < rlen; r++ ) {
            buf.append("row +").append(r).append(": ");
            if( r < allocated ) {
                buf.append(sparseRows[r]);
                buf.append("\n");
            }
            else {
                buf.append("null\n");
            }
        }
    }
    else if( denseBlock != null )
    {
        for( int r = 0, off = 0; r < rlen; r++, off += clen ) {
            for( int c = 0; c < clen; c++ ) {
                buf.append(denseBlock[off + c]);
                buf.append("\t");
            }
            buf.append("\n");
        }
    }
    return buf.toString();
}
///////////////////////////
// Helper classes
/**
 * Simple value holder for the result of a sparsity estimation: the
 * estimated number of non-zero cells and a flag indicating whether a
 * sparse representation was chosen.
 */
public static class SparsityEstimate
{
// estimated number of non-zero cells
public long estimatedNonZeros=0;
// true if the estimate selects the sparse representation
public boolean sparse=false;
public SparsityEstimate(boolean sps, long nnzs)
{
sparse=sps;
estimatedNonZeros=nnzs;
}
// default: dense, zero estimated non-zeros
public SparsityEstimate(){}
}
}
| 89826: SystemML Engine - Fix read sparse block (preserve read nnz on allocate sparse rows for general case)
| SystemML/SystemML/src/main/java/com/ibm/bi/dml/runtime/matrix/data/MatrixBlock.java | 89826: SystemML Engine - Fix read sparse block (preserve read nnz on allocate sparse rows for general case) |
|
Java | apache-2.0 | bae78008e67623dead92095cb1cd53b9032107e2 | 0 | difi/sikker-digital-post-klient-java,difi/sikker-digital-post-java-klient | /**
* Copyright (C) Posten Norge AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.difi.sdp.client2.integrationtests;
import no.difi.sdp.client2.KlientKonfigurasjon;
import no.difi.sdp.client2.SikkerDigitalPostKlient;
import no.difi.sdp.client2.domain.Forsendelse;
import no.difi.sdp.client2.domain.Noekkelpar;
import no.difi.sdp.client2.domain.Prioritet;
import no.difi.sdp.client2.domain.TekniskAvsender;
import no.difi.sdp.client2.domain.kvittering.ForretningsKvittering;
import no.difi.sdp.client2.domain.kvittering.KvitteringForespoersel;
import no.difi.sdp.client2.domain.kvittering.LeveringsKvittering;
import org.bouncycastle.asn1.x500.RDN;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x500.style.BCStyle;
import org.bouncycastle.asn1.x500.style.IETFUtils;
import org.bouncycastle.cert.jcajce.JcaX509CertificateHolder;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.core.io.ClassPathResource;
import java.io.IOException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static java.lang.Thread.sleep;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.fest.assertions.api.Assertions.fail;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class SikkerDigitalPostKlientIT {

    private static SikkerDigitalPostKlient postklient;
    private static String MpcId;
    private static String OrgNumber;
    private static KeyStore keyStore;

    /**
     * Builds the client under test: generates a fresh MPC id, reads the
     * organisation number from the enterprise certificate and configures the
     * client against the QA meldingsformidler endpoint.
     */
    @BeforeClass
    public static void setUp() {
        MpcId = UUID.randomUUID().toString();
        populateOrgNumberFromCertificate();

        KlientKonfigurasjon klientKonfigurasjon = KlientKonfigurasjon.builder()
                .meldingsformidlerRoot("https://qaoffentlig.meldingsformidler.digipost.no/api/ebms")
                .connectionTimeout(20, TimeUnit.SECONDS)
                .build();

        TekniskAvsender avsender = ObjectMother.tekniskAvsenderMedSertifikat(OrgNumber, avsenderNoekkelpar());
        postklient = new SikkerDigitalPostKlient(avsender, klientKonfigurasjon);
    }

    /**
     * Loads the sender key pair from the test keystore. The certificate
     * passphrase is read from the {@code smoketest_passphrase} environment
     * variable.
     */
    private static Noekkelpar avsenderNoekkelpar() {
        if (keyStore == null) {
            initKeyStore();
        }

        String alias = "virksomhetssertifikat";
        String passphrase = System.getenv("smoketest_passphrase");
        if (passphrase == null) {
            throw new RuntimeException(
                    "Klarte ikke hente ut system env variabelen 'smoketest_passphrase'.\n " +
                    "Sett sertifikatpassordet i en env variabel: \n" +
                    " export smoketest_passphrase=PASSPHRASE \n" +
                    "Hvis du debugger må env_variabel settes i test run configuration."
            );
        }

        return Noekkelpar.fraKeyStore(keyStore, alias, passphrase);
    }

    /**
     * Reads the organisation number (serial number RDN of the certificate
     * subject) from the enterprise certificate and stores it in
     * {@link #OrgNumber}.
     */
    private static void populateOrgNumberFromCertificate() {
        String klarteIkkeFinneVirksomhetsSertifikatet = "Klarte ikke hente ut virksomhetssertifikatet fra keystoren. \n";
        String oppsett = "For å kjøre integrasjonstester må det importeres et gyldig virksomhetssertifikat. \n" +
                "1) Hent alias(siste avsnitt, første del før komma): \n" +
                " keytool -list -keystore VIRKSOMHETSSERTIFIKAT.p12 -storetype pkcs12 \n" +
                "2) Importer sertifikatet i keystore: \n" +
                " keytool -v -importkeystore -srckeystore \"VIRKSOMHETSSERTIFIKAT.p12\" -srcstoretype PKCS12 -srcalias \"ALIAS\" -destalias \"virksomhetssertifikat\" -destkeystore \"src/integration-test/resources/SmokeTests.jceks\" -deststoretype jceks -storepass sophisticatedpassword \n" +
                "3) Sett sertifikatpassordet i en env variabel: \n" +
                " export smoketest_passphrase=PASSPHRASE";

        if (keyStore == null) {
            initKeyStore();
        }

        try {
            X509Certificate cert = (X509Certificate) keyStore.getCertificate("virksomhetssertifikat");
            if (cert == null) {
                throw new RuntimeException(klarteIkkeFinneVirksomhetsSertifikatet + oppsett);
            }
            X500Name x500name = new JcaX509CertificateHolder(cert).getSubject();
            RDN serialnumber = x500name.getRDNs(BCStyle.SN)[0];
            OrgNumber = IETFUtils.valueToString(serialnumber.getFirst().getValue());
        } catch (CertificateEncodingException e) {
            // keep the caught exception as cause so the failure can be diagnosed
            throw new RuntimeException("Klarte ikke hente ut organisasjonsnummer fra sertifikatet.", e);
        } catch (KeyStoreException e) {
            throw new RuntimeException(klarteIkkeFinneVirksomhetsSertifikatet + oppsett, e);
        }
    }

    /**
     * Opens the checked-in JCEKS test keystore holding the enterprise
     * certificate.
     */
    private static void initKeyStore() {
        try {
            String keystorePass = "sophisticatedpassword";
            String keyStoreFile = "/SmokeTests.jceks";
            keyStore = KeyStore.getInstance("JCEKS");
            keyStore.load(new ClassPathResource(keyStoreFile).getInputStream(), keystorePass.toCharArray());
        } catch (Exception e) {
            throw new RuntimeException("Kunne ikke initiere keystoren. Prøv å sjekk ut keystoren igjen og start på nytt. ", e);
        }
    }

    /**
     * Sends a digital letter. Runs first (NAME_ASCENDING ordering) so that
     * {@link #B_test_hent_kvittering()} can poll for its receipt.
     */
    @Test
    public void A_send_digital_forsendelse() {
        Forsendelse forsendelse = null;
        try {
            forsendelse = ObjectMother.forsendelse(OrgNumber, MpcId, new ClassPathResource("/test.pdf").getInputStream());
        } catch (IOException e) {
            fail("klarte ikke åpne hoveddokument.");
        }

        postklient.send(forsendelse);
    }

    /**
     * Polls for the delivery receipt of the letter sent in
     * {@link #A_send_digital_forsendelse()}, verifies it and confirms it back
     * to the service. Gives up after 10 attempts, roughly one second apart.
     */
    @Test
    public void B_test_hent_kvittering() throws InterruptedException {
        KvitteringForespoersel kvitteringForespoersel = KvitteringForespoersel.builder(Prioritet.PRIORITERT).mpcId(MpcId).build();
        ForretningsKvittering forretningsKvittering = null;

        sleep(1000); // wait 1 sec until first try
        for (int i = 0; i < 10; i++) {
            forretningsKvittering = postklient.hentKvittering(kvitteringForespoersel);

            if (forretningsKvittering != null) {
                System.out.println("Kvittering!");
                System.out.println(String.format("%s: %s, %s, %s, %s", forretningsKvittering.getClass().getSimpleName(), forretningsKvittering.getKonversasjonsId(), forretningsKvittering.getRefToMessageId(), forretningsKvittering.getTidspunkt(), forretningsKvittering));

                assertThat(forretningsKvittering.getKonversasjonsId()).isNotEmpty();
                assertThat(forretningsKvittering.getRefToMessageId()).isNotEmpty();
                assertThat(forretningsKvittering.getTidspunkt()).isNotNull();
                assertThat(forretningsKvittering).isInstanceOf(LeveringsKvittering.class);

                postklient.bekreft(forretningsKvittering);
                break;
            } else {
                System.out.println("Ingen kvittering");
                sleep(1000);
            }
        }

        // isNotNull() yields a clearer failure message than isTrue() on a boolean
        assertThat(forretningsKvittering).isNotNull();
    }
}
| src/integration-test/java/no.difi.sdp.client2.integrationtests/SikkerDigitalPostKlientIT.java | /**
* Copyright (C) Posten Norge AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.difi.sdp.client2.integrationtests;
import no.difi.sdp.client2.KlientKonfigurasjon;
import no.difi.sdp.client2.SikkerDigitalPostKlient;
import no.difi.sdp.client2.domain.Forsendelse;
import no.difi.sdp.client2.domain.Noekkelpar;
import no.difi.sdp.client2.domain.Prioritet;
import no.difi.sdp.client2.domain.TekniskAvsender;
import no.difi.sdp.client2.domain.kvittering.ForretningsKvittering;
import no.difi.sdp.client2.domain.kvittering.KvitteringForespoersel;
import no.difi.sdp.client2.domain.kvittering.LeveringsKvittering;
import org.bouncycastle.asn1.x500.RDN;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x500.style.BCStyle;
import org.bouncycastle.asn1.x500.style.IETFUtils;
import org.bouncycastle.cert.jcajce.JcaX509CertificateHolder;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ClassPathResource;
import java.io.IOException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static java.lang.Thread.sleep;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.fest.assertions.api.Assertions.fail;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class SikkerDigitalPostKlientIT {
private static SikkerDigitalPostKlient postklient;
private static String MpcId;
private static String OrgNumber;
private static KeyStore keyStore;
private static Noekkelpar avsenderNoekkelpar() {
if(keyStore == null)
initKeyStore();
String alias = "virksomhetssertifikat";
String passphrase = System.getenv("smoketest_passphrase");
if(passphrase == null){
throw new RuntimeException(
"Klarte ikke hente ut system env variabelen 'smoketest_passphrase'.\n "+
"Sett sertifikatpassordet i en env variabel: \n" +
" export smoketest_passphrase=PASSPHRASE");
}
return Noekkelpar.fraKeyStore(keyStore, alias, passphrase);
}
private static void populateOrgNumberFromCertificate(){
String klarteIkkeFinneVirksomhetsSertifikatet = "Klarte ikke hente ut virksomhetssertifikatet fra keystoren. \n";
String oppsett = "For å kjøre integrasjonstester må det importeres et gyldig virksomhetssertifikat. \n"+
"1) Hent alias(siste avsnitt, første del før komma): \n" +
" keytool -list -keystore VIRKSOMHETSSERTIFIKAT.p12 -storetype pkcs12 \n"+
"2) Importer sertifikatet i keystore: \n" +
" keytool -v -importkeystore -srckeystore \"VIRKSOMHETSSERTIFIKAT.p12\" -srcstoretype PKCS12 -srcalias \"ALIAS\" -destalias \"virksomhetssertifikat\" -destkeystore \"src/integration-test/resources/SmokeTests.jceks\" -deststoretype jceks -storepass sophisticatedpassword \n"+
"3) Sett sertifikatpassordet i en env variabel: \n"+
" export smoketest_passphrase=PASSPHRASE";
if(keyStore == null)
initKeyStore();
try {
X509Certificate cert = (X509Certificate) keyStore.getCertificate("virksomhetssertifikat");
if(cert == null){
throw new RuntimeException(klarteIkkeFinneVirksomhetsSertifikatet + oppsett);
}
X500Name x500name = new JcaX509CertificateHolder(cert).getSubject();
RDN serialnumber = x500name.getRDNs(BCStyle.SN)[0];
OrgNumber = IETFUtils.valueToString(serialnumber.getFirst().getValue());
} catch (CertificateEncodingException e) {
throw new RuntimeException("Klarte ikke hente ut organisasjonsnummer fra sertifikatet.");
} catch (KeyStoreException e) {
throw new RuntimeException(klarteIkkeFinneVirksomhetsSertifikatet + oppsett);
}
}
private static void initKeyStore(){
try {
String keystorePass = "sophisticatedpassword";
String keyStoreFile = "/SmokeTests.jceks";
keyStore = KeyStore.getInstance("JCEKS");
keyStore.load(new ClassPathResource(keyStoreFile).getInputStream(), keystorePass.toCharArray());
}
catch (Exception e) {
throw new RuntimeException("Kunne ikke initiere keystoren. Prøv å sjekk ut keystoren igjen og start på nytt. ", e);
}
}
@BeforeClass
public static void setUp() {
MpcId = UUID.randomUUID().toString();
populateOrgNumberFromCertificate();
KlientKonfigurasjon klientKonfigurasjon = KlientKonfigurasjon.builder()
.meldingsformidlerRoot("https://qaoffentlig.meldingsformidler.digipost.no/api/ebms")
.connectionTimeout(20, TimeUnit.SECONDS)
.build();
TekniskAvsender avsender = ObjectMother.tekniskAvsenderMedSertifikat(OrgNumber,avsenderNoekkelpar());
postklient = new SikkerDigitalPostKlient(avsender, klientKonfigurasjon);
}
@Test
public void A_send_digital_forsendelse() {
Forsendelse forsendelse = null;
try {
forsendelse = ObjectMother.forsendelse(OrgNumber, MpcId,new ClassPathResource("/test.pdf").getInputStream());
} catch (IOException e) {
fail("klarte ikke åpne hoveddokument.");
}
postklient.send(forsendelse);
}
@Test
public void B_test_hent_kvittering() throws InterruptedException {
KvitteringForespoersel kvitteringForespoersel = KvitteringForespoersel.builder(Prioritet.PRIORITERT).mpcId(MpcId).build();
ForretningsKvittering forretningsKvittering = null;
sleep(1000);//wait 1 sec until first try.
for (int i = 0; i < 10; i++) {
forretningsKvittering = postklient.hentKvittering(kvitteringForespoersel);
if (forretningsKvittering != null) {
System.out.println("Kvittering!");
System.out.println(String.format("%s: %s, %s, %s, %s", forretningsKvittering.getClass().getSimpleName(), forretningsKvittering.getKonversasjonsId(), forretningsKvittering.getRefToMessageId(), forretningsKvittering.getTidspunkt(), forretningsKvittering));
assertThat(forretningsKvittering.getKonversasjonsId()).isNotEmpty();
assertThat(forretningsKvittering.getRefToMessageId()).isNotEmpty();
assertThat(forretningsKvittering.getTidspunkt()).isNotNull();
assertThat(forretningsKvittering).isInstanceOf(LeveringsKvittering.class);
postklient.bekreft(forretningsKvittering);
break;
}
else {
System.out.println("Ingen kvittering");
sleep(1000);
}
}
assertThat(forretningsKvittering != null).isTrue();
}
}
| Code cleanup
| src/integration-test/java/no.difi.sdp.client2.integrationtests/SikkerDigitalPostKlientIT.java | Code cleanup |
|
Java | apache-2.0 | 1b719cb3fe07142e2da24291d1bad2e689a99f12 | 0 | SahaginOrg/sahagin-java,SahaginOrg/sahagin-java | package org.sahagin.report;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.FileWriterWithEncoding;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.app.VelocityEngine;
import org.openqa.selenium.io.IOUtils;
import org.sahagin.share.CommonPath;
import org.sahagin.share.CommonUtils;
import org.sahagin.share.IllegalDataStructureException;
import org.sahagin.share.runresults.LineScreenCapture;
import org.sahagin.share.runresults.RootFuncRunResult;
import org.sahagin.share.runresults.RunFailure;
import org.sahagin.share.runresults.RunResults;
import org.sahagin.share.runresults.StackLine;
import org.sahagin.share.srctree.SrcTree;
import org.sahagin.share.srctree.TestFunction;
import org.sahagin.share.srctree.TestMethod;
import org.sahagin.share.srctree.code.CodeLine;
import org.sahagin.share.srctree.code.SubFunctionInvoke;
import org.sahagin.share.yaml.YamlConvertException;
import org.sahagin.share.yaml.YamlUtils;
//TODO support TestDoc annotation for Enumerate type
//TODO support method optional argument
public class HtmlReport {
/**
 * Creates the report generator and initializes the Velocity template
 * engine with logging disabled.
 */
public HtmlReport() {
// stop generating velocity.log
Velocity.setProperty(VelocityEngine.RUNTIME_LOG_LOGSYSTEM_CLASS,
"org.apache.velocity.runtime.log.NullLogSystem");
Velocity.init();
}
// Builds the tree-table row id ("ttId") for a stack: the code-body indexes
// from the root call down to the innermost line, joined with '_'
// (e.g. "2_0_5"). Throws IllegalArgumentException on an empty stack.
private String generateTtId(List<StackLine> stackLines) {
    if (stackLines.isEmpty()) {
        throw new IllegalArgumentException("empty stackLines");
    }
    // use StringBuilder instead of repeated String concatenation in the loop
    StringBuilder ttId = new StringBuilder();
    for (int i = stackLines.size() - 1; i >= 0; i--) {
        if (i != stackLines.size() - 1) {
            ttId.append('_');
        }
        ttId.append(stackLines.get(i).getCodeBodyIndex());
    }
    return ttId.toString();
}
// Generates the ReportScreenCapture list for the HTML report from the
// captured line screenshots. A "noImage" placeholder capture is prepended
// so lines without a screenshot still have an image to show; all capture
// paths are rewritten to be relative to the report page's parent directory.
private List<ReportScreenCapture> generateReportScreenCaptures(
List<LineScreenCapture> lineScreenCaptures,
File inputCaptureRootDir, File reportOutputDir, File funcReportParentDir) {
List<ReportScreenCapture> reportCaptures
= new ArrayList<ReportScreenCapture>(lineScreenCaptures.size());
// add noImage capture
String noImageFilePath = new File(CommonUtils.relativize(
CommonPath.htmlExternalResourceRootDir(reportOutputDir), funcReportParentDir),
"images/noImage.png").getPath();
ReportScreenCapture noImageCapture = new ReportScreenCapture();
noImageCapture.setPath(noImageFilePath);
noImageCapture.setTtId("noImage");
reportCaptures.add(noImageCapture);
// add each line screen capture
for (LineScreenCapture lineScreenCapture : lineScreenCaptures) {
ReportScreenCapture reportCapture = new ReportScreenCapture();
// rebase the capture path from the input capture dir to the report output dir
File relInputCapturePath = CommonUtils.relativize(
lineScreenCapture.getPath(), inputCaptureRootDir);
File absOutputCapturePath = new File(
CommonPath.htmlReportCaptureRootDir(reportOutputDir), relInputCapturePath.getPath());
File relOutputCapturePath = CommonUtils.relativize(absOutputCapturePath, funcReportParentDir);
reportCapture.setPath(relOutputCapturePath.getPath());
// the ttId ties this capture to its tree-table code line
String ttId = generateTtId(lineScreenCapture.getStackLines());
reportCapture.setTtId(ttId);
reportCaptures.add(reportCapture);
}
return reportCaptures;
}
// Returns the run failure of the given result, or null when the result is
// missing or the run succeeded. Only the first failure is reported because
// multiple run failures in one test method are not supported yet.
private RunFailure getRunFailure(RootFuncRunResult runResult) {
    if (runResult != null && !runResult.getRunFailures().isEmpty()) {
        return runResult.getRunFailures().get(0);
    }
    return null; // no failure
}
// Returns the code-body index at which the failure occurred within the
// function at the given stack height, or -1 when there is no failure or
// the failure stack is shallower than stackLineHeight.
// stackLineHeight counts frames from the root test function (0 = root);
// stack lines are stored innermost-first, so the frame is read from the
// end of the list. expectedStackLineFunction is a sanity check that the
// frame belongs to the function the caller is rendering.
private int getErrorCodeBodyIndex(RunFailure failure,
int stackLineHeight, TestFunction expectedStackLineFunction) {
if (failure == null) {
return -1;
}
if (stackLineHeight < 0) {
throw new IllegalArgumentException(Integer.toString(stackLineHeight));
}
if (stackLineHeight >= failure.getStackLines().size()) {
return -1;
}
StackLine stackLine = failure.getStackLines().get(
failure.getStackLines().size() - 1 - stackLineHeight);
if (stackLine.getFunction() == null) {
throw new NullPointerException("implementation error");
}
if (!stackLine.getFunction().getKey().equals(expectedStackLineFunction.getKey())) {
throw new IllegalArgumentException(
"function mismatch: " + expectedStackLineFunction.getKey());
}
return stackLine.getCodeBodyIndex();
}
// Builds one report code line and flags its run state relative to the
// failing line index. errLineIndex == -1 means the run had no failure on
// this code path: every line executed and none carries an error. Otherwise
// lines before the failing index executed, the failing index itself both
// executed and carries the error, and later lines never ran.
// (The original if/else chain ended in an unreachable defensive throw;
// the two boolean expressions below cover all integer cases identically.)
private ReportCodeLine generateReportCodeLine(CodeLine codeLine, List<StackLine> stackLines,
        String ttId, String parentTtId, int codeLineIndex, int errLineIndex) {
    ReportCodeLine result = new ReportCodeLine();
    result.setCodeLine(codeLine);
    result.setStackLines(stackLines);
    result.setTtId(ttId);
    result.setParentTtId(parentTtId);

    boolean noError = (errLineIndex == -1);
    result.setHasError(!noError && errLineIndex == codeLineIndex);
    result.setAlreadyRun(noError || errLineIndex >= codeLineIndex);
    return result;
}
// Creates a StackLine for report rendering, pointing at the given
// code-body index and source line of the given function.
private StackLine generateStackLine(TestFunction function, String functionKey,
int codeBodyIndex, int line) {
StackLine result = new StackLine();
result.setFunction(function);
result.setFunctionKey(functionKey);
result.setCodeBodyIndex(codeBodyIndex);
result.setLine(line);
return result;
}
// Builds the report code lines for one root test function: the root
// function's own lines plus, for each sub-function invocation, the direct
// child lines of the invoked function (one nesting level only).
// runFailure: set null if there was no error.
private List<ReportCodeLine> generateReportCodeBody(
TestFunction rootFunction, RunFailure runFailure) {
int rootErrIndex = getErrorCodeBodyIndex(runFailure, 0, rootFunction);
List<ReportCodeLine> result = new ArrayList<ReportCodeLine>(rootFunction.getCodeBody().size());
for (int i = 0; i < rootFunction.getCodeBody().size(); i++) {
CodeLine codeLine = rootFunction.getCodeBody().get(i);
String rootTtId = Integer.toString(i);
StackLine rootStackLine = generateStackLine(
rootFunction, rootFunction.getKey(), i, codeLine.getStartLine());
List<StackLine> rootStackLines = new ArrayList<StackLine>(1);
rootStackLines.add(rootStackLine);
ReportCodeLine reportCodeLine = generateReportCodeLine(
codeLine, rootStackLines, rootTtId, null, i, rootErrIndex);
result.add(reportCodeLine);
// add direct child to HTML report
if (codeLine.getCode() instanceof SubFunctionInvoke) {
SubFunctionInvoke invoke = (SubFunctionInvoke) codeLine.getCode();
List<CodeLine> codeBody = invoke.getSubFunction().getCodeBody();
// a child error index is only relevant when the error is on this root line
int errIndex = -1;
if (reportCodeLine.hasError()) {
errIndex = getErrorCodeBodyIndex(runFailure, 1, invoke.getSubFunction());
}
for (int j = 0; j < codeBody.size(); j++) {
CodeLine childCodeLine = codeBody.get(j);
StackLine childStackLine = generateStackLine(invoke.getSubFunction(),
invoke.getSubFunctionKey(), j, childCodeLine.getStartLine());
List<StackLine> childStackLines = new ArrayList<StackLine>(2);
childStackLines.add(childStackLine);
childStackLines.add(rootStackLine);
ReportCodeLine childReportCodeLine = generateReportCodeLine(
childCodeLine, childStackLines, rootTtId + "_" + j, rootTtId,
j, errIndex);
result.add(childReportCodeLine);
}
}
}
return result;
}
// Loads the source tree from the YAML file under the report input data
// directory and resolves its internal key references.
private SrcTree generateSrcTree(File reportInputDataDir)
throws IllegalDataStructureException {
// generate srcTree from YAML file
Map<String, Object> yamlObj = YamlUtils.load(
CommonPath.srcTreeFile(reportInputDataDir));
SrcTree srcTree = new SrcTree();
try {
srcTree.fromYamlObject(yamlObj);
} catch (YamlConvertException e) {
// a malformed YAML file is surfaced as a data-structure problem
throw new IllegalDataStructureException(e);
}
srcTree.resolveKeyReference();
return srcTree;
}
// Loads all run-result YAML files found under the report input data
// directory and resolves their references against the given source tree.
// Returns an empty result set when no run results exist yet.
private RunResults generateRunResults(File reportInputDataDir, SrcTree srcTree)
throws IllegalDataStructureException {
RunResults results = new RunResults();
Collection<File> runResultFiles;
File runResultsRootDir = CommonPath.runResultRootDir(reportInputDataDir);
if (runResultsRootDir.exists()) {
runResultFiles = FileUtils.listFiles(runResultsRootDir, null, true);
} else {
// no runs have been recorded yet
runResultFiles = new ArrayList<File>(0);
}
for (File runResultFile : runResultFiles) {
Map<String, Object> runResultYamlObj = YamlUtils.load(runResultFile);
RootFuncRunResult rootFuncRunResult = new RootFuncRunResult();
try {
rootFuncRunResult.fromYamlObject(runResultYamlObj);
} catch (YamlConvertException e) {
throw new IllegalDataStructureException(e);
}
results.addRootFuncRunResults(rootFuncRunResult);
}
results.resolveKeyReference(srcTree);
return results;
}
// Recursively deletes the directory if it exists; a failed cleanup is
// treated as fatal, so the checked IOException is wrapped and rethrown.
private void deleteDirIfExists(File dir) {
    if (!dir.exists()) {
        return; // nothing to clean up
    }
    try {
        FileUtils.deleteDirectory(dir);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
// each report HTML file is {methodQualifiedParentPath}/{methodSimpleName}.html
public void generate(File reportInputDataDir, File reportOutputDir)
throws IllegalDataStructureException {
deleteDirIfExists(reportOutputDir); // delete previous execution output
SrcTree srcTree = generateSrcTree(reportInputDataDir);
RunResults runResults = generateRunResults(reportInputDataDir, srcTree);
File htmlExternalResRootDir = CommonPath.htmlExternalResourceRootDir(reportOutputDir);
// generate src-tree-yaml.js
String srcTreeYamlStr;
try {
srcTreeYamlStr = FileUtils.readFileToString(CommonPath.srcTreeFile(reportInputDataDir), "UTF-8");
} catch (IOException e) {
throw new RuntimeException(e);
}
VelocityContext srcTreeContext = new VelocityContext();
srcTreeContext.put("yamlStr", srcTreeYamlStr);
File srcTreeYamlJsFile = new File(htmlExternalResRootDir, "js/report/src-tree-yaml.js");
generateVelocityOutput(srcTreeContext, "/template/src-tree-yaml.js.vm", srcTreeYamlJsFile);
// set up HTML external files
// TODO all file paths are hard coded. this is very poor logic..
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/common-utils.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/yaml/js-yaml.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/yaml/yaml-utils.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/code.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/string-code.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/unknown-code.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/sub-function-invoke.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/sub-method-invoke.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/code-line.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-class.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/page-class.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-function.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-method.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-func-table.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-class-table.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/src-tree.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/testdoc-resolver.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.eot");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.svg");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.ttf");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.woff");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/images/bx_loader.gif");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/images/controls.png");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/jquery.bxslider.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/jquery.treetable.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/jquery.treetable.theme.default.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/perfect-scrollbar.min.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/report.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "images/noImage.png");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/jquery-1.11.1.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/jquery.bxslider.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/jquery.treetable.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/perfect-scrollbar.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/report.js");
// copy screen captures to reportOutputDir
// TODO copying screen capture may be slow action
File inputCaptureRootDir = CommonPath.inputCaptureRootDir(reportInputDataDir);
File htmlReportCaptureRootDir = CommonPath.htmlReportCaptureRootDir(reportOutputDir);
try {
if (inputCaptureRootDir.exists()) {
FileUtils.copyDirectory(inputCaptureRootDir, htmlReportCaptureRootDir);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
List<TestFunction> testFunctions = srcTree.getRootFuncTable().getTestFunctions();
File reportMainDir = CommonPath.htmlReportMainFile(reportOutputDir).getParentFile();
List<ReportFuncLink> reportLinks = new ArrayList<ReportFuncLink>(testFunctions.size());
// generate each function report
for (TestFunction rootFunc : testFunctions) {
TestMethod method = (TestMethod) rootFunc;
File funcReportParentDir = new File(CommonPath.funcHtmlReportRootDir(reportOutputDir),
method.getTestClass().getQualifiedName());
funcReportParentDir.mkdirs();
VelocityContext funcContext = new VelocityContext();
if (rootFunc.getTestDoc() == null) {
funcContext.put("title", rootFunc.getSimpleName());
} else {
funcContext.put("title", rootFunc.getTestDoc());
}
funcContext.put("externalResourceRootDir", CommonUtils.relativize(
CommonPath.htmlExternalResourceRootDir(reportOutputDir), funcReportParentDir).getPath());
if (!(rootFunc instanceof TestMethod)) {
throw new RuntimeException("not supported yet: " + rootFunc);
}
funcContext.put("className", method.getTestClass().getQualifiedName());
funcContext.put("classTestDoc", method.getTestClass().getTestDoc());
funcContext.put("funcName", rootFunc.getSimpleName());
funcContext.put("funcTestDoc", rootFunc.getTestDoc());
RootFuncRunResult runResult = runResults.getRunResultByRootFunction(rootFunc);
List<LineScreenCapture> lineScreenCaptures;
if (runResult == null) {
lineScreenCaptures = new ArrayList<LineScreenCapture>(0);
} else {
lineScreenCaptures = runResult.getLineScreenCaptures();
}
RunFailure runFailure = getRunFailure(runResult);
if (runFailure == null) {
funcContext.put("errMsg", null);
} else {
funcContext.put("errMsg", runFailure.getMessage().trim());
}
List<ReportCodeLine> reportCodeBody = generateReportCodeBody(rootFunc, runFailure);
funcContext.put("codeBody", reportCodeBody);
List<ReportScreenCapture> captures = generateReportScreenCaptures(
lineScreenCaptures, inputCaptureRootDir, reportOutputDir, funcReportParentDir);
funcContext.put("captures", captures);
File funcReportFile = new File(funcReportParentDir, rootFunc.getSimpleName() + ".html");
generateVelocityOutput(funcContext, "/template/report.html.vm", funcReportFile);
// set reportLinks data
ReportFuncLink reportLink = new ReportFuncLink();
reportLink.setTitle(method.getQualifiedName());
reportLink.setPath(CommonUtils.relativize(funcReportFile, reportMainDir).getPath());
reportLinks.add(reportLink);
}
// generate main index.html report
VelocityContext mainContext = new VelocityContext();
mainContext.put("reportLinks", reportLinks);
generateVelocityOutput(mainContext, "/template/index.html.vm",
CommonPath.htmlReportMainFile(reportOutputDir));
}
/**
 * Renders a Velocity template from the class path into {@code outputFile} (UTF-8).
 *
 * @param context variables made available to the template
 * @param templateResourcePath class path location of the template, e.g. "/template/report.html.vm"
 * @param outputFile file to write; parent directories are created as needed
 * @throws RuntimeException if the template resource is missing or an I/O error occurs
 */
private void generateVelocityOutput(
        VelocityContext context, String templateResourcePath, File outputFile) {
    outputFile.getParentFile().mkdirs();
    InputStream in = null;
    Reader reader = null;
    FileWriterWithEncoding writer = null;
    try {
        in = this.getClass().getResourceAsStream(templateResourcePath);
        if (in == null) {
            // fail fast with a clear message instead of an anonymous NPE below
            throw new RuntimeException("template resource not found: " + templateResourcePath);
        }
        reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
        writer = new FileWriterWithEncoding(outputFile, "UTF-8");
        Velocity.evaluate(context, writer, this.getClass().getSimpleName(), reader);
        // explicit closes so close failures are reported; the finally block
        // only closes quietly as a safety net on the error path
        writer.close();
        reader.close();
        in.close();
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(writer);
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(in);
    }
}
/**
 * Copies a single resource bundled in this jar into the HTML external resource
 * directory, preserving its relative path.
 *
 * @param htmlExternalResourceRootDir destination root directory
 * @param copyPath resource path, relative both to the class path root and to the destination root
 * @throws RuntimeException if the resource does not exist in the jar or copying fails
 */
private void extractHtmlExternalResFromThisJar(File htmlExternalResourceRootDir, String copyPath) {
    InputStream in = this.getClass().getResourceAsStream("/" + copyPath);
    if (in == null) {
        // fail fast with a clear message instead of an anonymous NPE inside the copy call
        throw new RuntimeException("resource not found in jar: /" + copyPath);
    }
    File destFile = new File(htmlExternalResourceRootDir, copyPath);
    destFile.getParentFile().mkdirs();
    try {
        FileUtils.copyInputStreamToFile(in, destFile);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
}
| src/main/java/org/sahagin/report/HtmlReport.java | package org.sahagin.report;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.FileWriterWithEncoding;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.app.VelocityEngine;
import org.openqa.selenium.io.IOUtils;
import org.sahagin.share.CommonPath;
import org.sahagin.share.CommonUtils;
import org.sahagin.share.IllegalDataStructureException;
import org.sahagin.share.runresults.LineScreenCapture;
import org.sahagin.share.runresults.RootFuncRunResult;
import org.sahagin.share.runresults.RunFailure;
import org.sahagin.share.runresults.RunResults;
import org.sahagin.share.runresults.StackLine;
import org.sahagin.share.srctree.SrcTree;
import org.sahagin.share.srctree.TestFunction;
import org.sahagin.share.srctree.TestMethod;
import org.sahagin.share.srctree.code.CodeLine;
import org.sahagin.share.srctree.code.SubFunctionInvoke;
import org.sahagin.share.yaml.YamlConvertException;
import org.sahagin.share.yaml.YamlUtils;
//TODO support TestDoc annotation for Enumerate type
//TODO support method optional argument
public class HtmlReport {
// Configures the shared Velocity runtime once for this report generator.
public HtmlReport() {
// stop generating velocity.log
Velocity.setProperty(VelocityEngine.RUNTIME_LOG_LOGSYSTEM_CLASS,
"org.apache.velocity.runtime.log.NullLogSystem");
Velocity.init();
}
// Returns the first capture whose recorded stack matches the given stack
// lines, or null when no capture was taken for that call position.
private LineScreenCapture getScreenCaptureOfStacks(
        List<StackLine> stackLines, List<LineScreenCapture> lineScreenCaptures) {
    for (int i = 0; i < lineScreenCaptures.size(); i++) {
        LineScreenCapture candidate = lineScreenCaptures.get(i);
        if (candidate.matchesStackLines(stackLines)) {
            return candidate;
        }
    }
    return null;
}
// search LineScreenCapture for each code line of reportCodeBody
// from lineScreenCaptures and generate ReportScreenCapture list.
// The first entry is always the "noImage" placeholder; capture paths are
// rewritten to be relative to the function report page's directory.
private List<ReportScreenCapture> generateReportScreenCaptures(
List<ReportCodeLine> reportCodeBody, List<LineScreenCapture> lineScreenCaptures,
File inputCaptureRootDir, File reportOutputDir, File funcReportParentDir) {
List<ReportScreenCapture> reportCaptures
= new ArrayList<ReportScreenCapture>(reportCodeBody.size());
// placeholder image shown for lines without a capture
String noImageFilePath = new File(CommonUtils.relativize(
CommonPath.htmlExternalResourceRootDir(reportOutputDir), funcReportParentDir),
"images/noImage.png").getPath();
// add noImage line
ReportScreenCapture noImageCapture = new ReportScreenCapture();
noImageCapture.setTtId("noImage");
noImageCapture.setPath(noImageFilePath);
reportCaptures.add(noImageCapture);
for (int i = 0; i < reportCodeBody.size(); i++) {
// match by full stack so nested sub-step lines find their own capture
LineScreenCapture capture = getScreenCaptureOfStacks(
reportCodeBody.get(i).getStackLines(), lineScreenCaptures);
ReportScreenCapture reportCapture = new ReportScreenCapture();
reportCapture.setTtId(reportCodeBody.get(i).getTtId());
if (capture == null) {
reportCapture.setPath(noImageFilePath);
} else {
// input path -> path under the report's capture dir -> relative to this page
File relInputCapturePath = CommonUtils.relativize(
capture.getPath(), inputCaptureRootDir);
File absOutputCapturePath = new File(
CommonPath.htmlReportCaptureRootDir(reportOutputDir), relInputCapturePath.getPath());
File relOutputCapturePath = CommonUtils.relativize(absOutputCapturePath, funcReportParentDir);
reportCapture.setPath(relOutputCapturePath.getPath());
}
reportCaptures.add(reportCapture);
}
return reportCaptures;
}
// Returns the first failure recorded for the run, or null when there was
// none (test passed or was never executed). Multiple run failures in one
// test method are not supported yet.
private RunFailure getRunFailure(RootFuncRunResult runResult) {
    if (runResult == null) {
        return null; // never run
    }
    List<RunFailure> failures = runResult.getRunFailures();
    return failures.isEmpty() ? null : failures.get(0);
}
// Returns the code-body index at which the failure occurred within the
// function found stackLineHeight frames below the top of the failure stack
// (0 = outermost frame). Returns -1 when there is no failure or the stack is
// shallower than requested; verifies the frame belongs to the expected function.
private int getErrorCodeBodyIndex(RunFailure failure,
int stackLineHeight, TestFunction expectedStackLineFunction) {
if (failure == null) {
return -1;
}
if (stackLineHeight < 0) {
throw new IllegalArgumentException(Integer.toString(stackLineHeight));
}
if (stackLineHeight >= failure.getStackLines().size()) {
return -1;
}
// stack lines are stored innermost-first, so index from the end
StackLine stackLine = failure.getStackLines().get(
failure.getStackLines().size() - 1 - stackLineHeight);
if (stackLine.getFunction() == null) {
throw new NullPointerException("implementation error");
}
if (!stackLine.getFunction().getKey().equals(expectedStackLineFunction.getKey())) {
throw new IllegalArgumentException(
"function mismatch: " + expectedStackLineFunction.getKey());
}
return stackLine.getCodeBodyIndex();
}
// Builds one report row for a code line, flagging whether it failed and
// whether execution ever reached it. errLineIndex == -1 means no error
// occurred in this code body.
private ReportCodeLine generateReportCodeLine(CodeLine codeLine, List<StackLine> stackLines,
        String ttId, String parentTtId, int codeLineIndex, int errLineIndex) {
    ReportCodeLine reportLine = new ReportCodeLine();
    reportLine.setCodeLine(codeLine);
    reportLine.setStackLines(stackLines);
    reportLine.setTtId(ttId);
    reportLine.setParentTtId(parentTtId);
    if (errLineIndex == -1 || codeLineIndex < errLineIndex) {
        // executed without failing
        reportLine.setHasError(false);
        reportLine.setAlreadyRun(true);
    } else if (codeLineIndex == errLineIndex) {
        // the failing line itself
        reportLine.setHasError(true);
        reportLine.setAlreadyRun(true);
    } else {
        // located after the failing line, so it was never executed
        reportLine.setHasError(false);
        reportLine.setAlreadyRun(false);
    }
    return reportLine;
}
// Creates a StackLine describing one frame: which function, at which
// code-body index, and at which source line.
private StackLine generateStackLine(TestFunction function, String functionKey,
        int codeBodyIndex, int line) {
    StackLine stackLine = new StackLine();
    stackLine.setFunction(function);
    stackLine.setFunctionKey(functionKey);
    stackLine.setCodeBodyIndex(codeBodyIndex);
    stackLine.setLine(line);
    return stackLine;
}
// runFailure... set null if not error
// Flattens a root test function into report rows: one row per root code line,
// immediately followed by rows for the direct children of any sub-function
// call. Row ids ("ttId") encode the tree position ("i" and "i_j") for the
// treetable rendering; error indices mark which lines ran/failed.
private List<ReportCodeLine> generateReportCodeBody(
TestFunction rootFunction, RunFailure runFailure) {
int rootErrIndex = getErrorCodeBodyIndex(runFailure, 0, rootFunction);
List<ReportCodeLine> result = new ArrayList<ReportCodeLine>(rootFunction.getCodeBody().size());
for (int i = 0; i < rootFunction.getCodeBody().size(); i++) {
CodeLine codeLine = rootFunction.getCodeBody().get(i);
String rootTtId = Integer.toString(i);
StackLine rootStackLine = generateStackLine(
rootFunction, rootFunction.getKey(), i, codeLine.getStartLine());
List<StackLine> rootStackLines = new ArrayList<StackLine>(1);
rootStackLines.add(rootStackLine);
ReportCodeLine reportCodeLine = generateReportCodeLine(
codeLine, rootStackLines, rootTtId, null, i, rootErrIndex);
result.add(reportCodeLine);
// add direct child to HTML report
if (codeLine.getCode() instanceof SubFunctionInvoke) {
SubFunctionInvoke invoke = (SubFunctionInvoke) codeLine.getCode();
List<CodeLine> codeBody = invoke.getSubFunction().getCodeBody();
// only resolve the child error index when this root line failed
int errIndex = -1;
if (reportCodeLine.hasError()) {
errIndex = getErrorCodeBodyIndex(runFailure, 1, invoke.getSubFunction());
}
for (int j = 0; j < codeBody.size(); j++) {
CodeLine childCodeLine = codeBody.get(j);
StackLine childStackLine = generateStackLine(invoke.getSubFunction(),
invoke.getSubFunctionKey(), j, childCodeLine.getStartLine());
// child stack = [child frame, parent frame]
List<StackLine> childStackLines = new ArrayList<StackLine>(2);
childStackLines.add(childStackLine);
childStackLines.add(rootStackLine);
ReportCodeLine childReportCodeLine = generateReportCodeLine(
childCodeLine, childStackLines, rootTtId + "_" + j, rootTtId,
j, errIndex);
result.add(childReportCodeLine);
}
}
}
return result;
}
// Loads the source tree from the intermediate YAML file and resolves the
// cross references between its entries.
// Throws IllegalDataStructureException when the YAML cannot be converted.
private SrcTree generateSrcTree(File reportInputDataDir)
throws IllegalDataStructureException {
// generate srcTree from YAML file
Map<String, Object> yamlObj = YamlUtils.load(
CommonPath.srcTreeFile(reportInputDataDir));
SrcTree srcTree = new SrcTree();
try {
srcTree.fromYamlObject(yamlObj);
} catch (YamlConvertException e) {
throw new IllegalDataStructureException(e);
}
srcTree.resolveKeyReference();
return srcTree;
}
// Loads all run-result YAML files (recursively) from the run-result root
// directory and resolves their references against the given source tree.
// A missing run-result directory yields an empty result set.
private RunResults generateRunResults(File reportInputDataDir, SrcTree srcTree)
throws IllegalDataStructureException {
RunResults results = new RunResults();
Collection<File> runResultFiles;
File runResultsRootDir = CommonPath.runResultRootDir(reportInputDataDir);
if (runResultsRootDir.exists()) {
runResultFiles = FileUtils.listFiles(runResultsRootDir, null, true);
} else {
runResultFiles = new ArrayList<File>(0);
}
for (File runResultFile : runResultFiles) {
Map<String, Object> runResultYamlObj = YamlUtils.load(runResultFile);
RootFuncRunResult rootFuncRunResult = new RootFuncRunResult();
try {
rootFuncRunResult.fromYamlObject(runResultYamlObj);
} catch (YamlConvertException e) {
throw new IllegalDataStructureException(e);
}
results.addRootFuncRunResults(rootFuncRunResult);
}
results.resolveKeyReference(srcTree);
return results;
}
// Removes the directory and all its contents when present; I/O failures are
// wrapped in an unchecked exception.
private void deleteDirIfExists(File dir) {
    if (!dir.exists()) {
        return;
    }
    try {
        FileUtils.deleteDirectory(dir);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
// each report HTML file is {methodQualifiedParentPath}/{methodSimpleName}.html
/**
 * Entry point: generates the complete HTML report.
 * Reads the intermediate data (source tree YAML, run results, screen
 * captures) from reportInputDataDir and writes the report tree into
 * reportOutputDir, deleting any previous report output first.
 *
 * @param reportInputDataDir root directory of the intermediate test data
 * @param reportOutputDir root directory the HTML report is written into
 * @throws IllegalDataStructureException if the intermediate YAML data is inconsistent
 */
public void generate(File reportInputDataDir, File reportOutputDir)
throws IllegalDataStructureException {
deleteDirIfExists(reportOutputDir); // delete previous execution output
SrcTree srcTree = generateSrcTree(reportInputDataDir);
RunResults runResults = generateRunResults(reportInputDataDir, srcTree);
File htmlExternalResRootDir = CommonPath.htmlExternalResourceRootDir(reportOutputDir);
// generate src-tree-yaml.js
String srcTreeYamlStr;
try {
srcTreeYamlStr = FileUtils.readFileToString(CommonPath.srcTreeFile(reportInputDataDir), "UTF-8");
} catch (IOException e) {
throw new RuntimeException(e);
}
VelocityContext srcTreeContext = new VelocityContext();
srcTreeContext.put("yamlStr", srcTreeYamlStr);
File srcTreeYamlJsFile = new File(htmlExternalResRootDir, "js/report/src-tree-yaml.js");
generateVelocityOutput(srcTreeContext, "/template/src-tree-yaml.js.vm", srcTreeYamlJsFile);
// set up HTML external files
// TODO all file paths are hard coded. this is very poor logic..
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/common-utils.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/yaml/js-yaml.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/yaml/yaml-utils.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/code.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/string-code.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/unknown-code.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/sub-function-invoke.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/sub-method-invoke.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/code/code-line.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-class.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/page-class.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-function.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-method.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-func-table.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/test-class-table.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/srctree/src-tree.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/share/testdoc-resolver.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.eot");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.svg");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.ttf");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/fonts/flexslider-icon.woff");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/images/bx_loader.gif");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/images/controls.png");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/jquery.bxslider.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/jquery.treetable.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/jquery.treetable.theme.default.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/perfect-scrollbar.min.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "css/report.css");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "images/noImage.png");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/jquery-1.11.1.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/jquery.bxslider.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/jquery.treetable.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/perfect-scrollbar.min.js");
extractHtmlExternalResFromThisJar(htmlExternalResRootDir, "js/report/report.js");
// copy screen captures to reportOutputDir
// TODO copying screen capture may be slow action
File inputCaptureRootDir = CommonPath.inputCaptureRootDir(reportInputDataDir);
File htmlReportCaptureRootDir = CommonPath.htmlReportCaptureRootDir(reportOutputDir);
try {
if (inputCaptureRootDir.exists()) {
FileUtils.copyDirectory(inputCaptureRootDir, htmlReportCaptureRootDir);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
List<TestFunction> testFunctions = srcTree.getRootFuncTable().getTestFunctions();
File reportMainDir = CommonPath.htmlReportMainFile(reportOutputDir).getParentFile();
List<ReportFuncLink> reportLinks = new ArrayList<ReportFuncLink>(testFunctions.size());
// generate each function report
for (TestFunction rootFunc : testFunctions) {
TestMethod method = (TestMethod) rootFunc;
File funcReportParentDir = new File(CommonPath.funcHtmlReportRootDir(reportOutputDir),
method.getTestClass().getQualifiedName());
funcReportParentDir.mkdirs();
VelocityContext funcContext = new VelocityContext();
// prefer the human readable TestDoc as page title when available
if (rootFunc.getTestDoc() == null) {
funcContext.put("title", rootFunc.getSimpleName());
} else {
funcContext.put("title", rootFunc.getTestDoc());
}
funcContext.put("externalResourceRootDir", CommonUtils.relativize(
CommonPath.htmlExternalResourceRootDir(reportOutputDir), funcReportParentDir).getPath());
if (!(rootFunc instanceof TestMethod)) {
throw new RuntimeException("not supported yet: " + rootFunc);
}
funcContext.put("className", method.getTestClass().getQualifiedName());
funcContext.put("classTestDoc", method.getTestClass().getTestDoc());
funcContext.put("funcName", rootFunc.getSimpleName());
funcContext.put("funcTestDoc", rootFunc.getTestDoc());
RootFuncRunResult runResult = runResults.getRunResultByRootFunction(rootFunc);
List<LineScreenCapture> lineScreenCaptures;
if (runResult == null) {
lineScreenCaptures = new ArrayList<LineScreenCapture>(0);
} else {
lineScreenCaptures = runResult.getLineScreenCaptures();
}
RunFailure runFailure = getRunFailure(runResult);
if (runFailure == null) {
funcContext.put("errMsg", null);
} else {
funcContext.put("errMsg", runFailure.getMessage().trim());
}
List<ReportCodeLine> reportCodeBody = generateReportCodeBody(rootFunc, runFailure);
funcContext.put("codeBody", reportCodeBody);
List<ReportScreenCapture> captures = generateReportScreenCaptures(
reportCodeBody, lineScreenCaptures,
inputCaptureRootDir, reportOutputDir, funcReportParentDir);
funcContext.put("captures", captures);
File funcReportFile = new File(funcReportParentDir, rootFunc.getSimpleName() + ".html");
generateVelocityOutput(funcContext, "/template/report.html.vm", funcReportFile);
// set reportLinks data
ReportFuncLink reportLink = new ReportFuncLink();
reportLink.setTitle(method.getQualifiedName());
reportLink.setPath(CommonUtils.relativize(funcReportFile, reportMainDir).getPath());
reportLinks.add(reportLink);
}
// generate main index.html report
VelocityContext mainContext = new VelocityContext();
mainContext.put("reportLinks", reportLinks);
generateVelocityOutput(mainContext, "/template/index.html.vm",
CommonPath.htmlReportMainFile(reportOutputDir));
}
/**
 * Renders a Velocity template from the class path into {@code outputFile} (UTF-8).
 *
 * @param context variables made available to the template
 * @param templateResourcePath class path location of the template, e.g. "/template/report.html.vm"
 * @param outputFile file to write; parent directories are created as needed
 * @throws RuntimeException if the template resource is missing or an I/O error occurs
 */
private void generateVelocityOutput(
        VelocityContext context, String templateResourcePath, File outputFile) {
    outputFile.getParentFile().mkdirs();
    InputStream in = null;
    Reader reader = null;
    FileWriterWithEncoding writer = null;
    try {
        in = this.getClass().getResourceAsStream(templateResourcePath);
        if (in == null) {
            // fail fast with a clear message instead of an anonymous NPE below
            throw new RuntimeException("template resource not found: " + templateResourcePath);
        }
        reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
        writer = new FileWriterWithEncoding(outputFile, "UTF-8");
        Velocity.evaluate(context, writer, this.getClass().getSimpleName(), reader);
        // explicit closes so close failures are reported; the finally block
        // only closes quietly as a safety net on the error path
        writer.close();
        reader.close();
        in.close();
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(writer);
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(in);
    }
}
/**
 * Copies a single resource bundled in this jar into the HTML external resource
 * directory, preserving its relative path.
 *
 * @param htmlExternalResourceRootDir destination root directory
 * @param copyPath resource path, relative both to the class path root and to the destination root
 * @throws RuntimeException if the resource does not exist in the jar or copying fails
 */
private void extractHtmlExternalResFromThisJar(File htmlExternalResourceRootDir, String copyPath) {
    InputStream in = this.getClass().getResourceAsStream("/" + copyPath);
    if (in == null) {
        // fail fast with a clear message instead of an anonymous NPE inside the copy call
        throw new RuntimeException("resource not found in jar: /" + copyPath);
    }
    File destFile = new File(htmlExternalResourceRootDir, copyPath);
    destFile.getParentFile().mkdirs();
    try {
        FileUtils.copyInputStreamToFile(in, destFile);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
}
| Show screen capture on report for nested sub steps
| src/main/java/org/sahagin/report/HtmlReport.java | Show screen capture on report for nested sub steps |
|
Java | apache-2.0 | ec7ee56381c5a5079fac72f2bb4b44f7e83cba92 | 0 | fhoeben/hsac-fitnesse-fixtures,fhoeben/hsac-fitnesse-fixtures,fhoeben/hsac-fitnesse-fixtures,fhoeben/hsac-fitnesse-fixtures | package nl.hsac.fitnesse.junit.reportmerge.writer;
import nl.hsac.fitnesse.junit.reportmerge.TestReportHtml;
import java.io.File;
import java.io.PrintWriter;
import java.text.NumberFormat;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.ERROR_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.FAIL_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.IGNORE_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.NO_TEST_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.PASS_STATUS;
public class HtmlOverviewFileWriter extends OverviewFileWriter {
// element ids of the chart containers generated into the page
private static final String TESTCOUNT_CHART_ID = "countPie";
private static final String RUNTIME_CHART_ID = "runtimePie";
private static final String STATUS_CHART_ID = "statusPie";
private static final String TIME_PER_TEST_CHART_ID = "timePerTestChart";
// inline JS helper: selects an element's contents, copies them to the
// clipboard, then restores the user's previous selection
private static final String COPY_TO_CLIPBOARD_JS =
"function elementContentsToClipboard(el) {" +
"var selected = document.getSelection().rangeCount > 0? document.getSelection().getRangeAt(0):false;" +
"var body = document.body, range, sel;" +
"if (document.createRange && window.getSelection) {" +
"range = document.createRange();" +
"sel = window.getSelection();" +
"sel.removeAllRanges();" +
"try {" +
"range.selectNodeContents(el);" +
"sel.addRange(range);" +
"} catch (e) {" +
"range.selectNode(el);" +
"sel.addRange(range);" +
"}" +
"document.execCommand('copy');" +
"} else if (body.createTextRange) {" +
"range = body.createTextRange();" +
"range.moveToElementText(el);" +
"range.select();" +
"range.execCommand('copy');" +
"}" +
"document.getSelection().removeAllRanges();" +
"if (selected) document.getSelection().addRange(selected);" +
"}";
// formats runtimes in the result tables
private final NumberFormat nf = NumberFormat.getIntegerInstance();
// Writes the merged overview report to <parentDir>/index.html.
public HtmlOverviewFileWriter(File parentDir) {
super(parentDir, "index.html");
}
@Override
protected void writeContent(List<TestReportHtml> htmls) {
// header, body and footer are separately overridable by subclasses
writeHeader(pw, htmls);
writeBody(pw, htmls);
writeFooter(pw, htmls);
}
// Opens the HTML document: reuses the fitnesse.css of the first report's run
// directory, embeds the copy-to-clipboard helper script and any extra header
// content (chart library by default).
protected void writeHeader(PrintWriter pw, List<TestReportHtml> htmls) {
TestReportHtml firstTestPage = htmls.get(0);
String firstRunCssDir = firstTestPage.getDirectory();
pw.write("<html><head><meta http-equiv='Content-Type' content='text/html;charset=UTF-8'/><link rel='stylesheet' type='text/css' href='");
pw.write(firstRunCssDir);
pw.write("/css/fitnesse.css'/>");
pw.write("<script type='text/javascript'>");
pw.write(COPY_TO_CLIPBOARD_JS);
pw.write("</script>");
writeExtraHeaderContent(pw, htmls);
pw.write("</head><body>");
}
// Hook for subclasses; by default writes the chart library's load script tag.
protected void writeExtraHeaderContent(PrintWriter pw, List<TestReportHtml> htmls) {
getChartWriter(pw).writeLoadScriptTag();
}
// Closes the document opened by writeHeader.
protected void writeFooter(PrintWriter pw, List<TestReportHtml> htmls) {
pw.write("</body></html>");
}
// Page body: overview (graphs + links) followed by the detailed result tables.
protected void writeBody(PrintWriter pw, List<TestReportHtml> htmls) {
writeOverviewSection(pw, htmls);
writeTestResultsSection(pw, htmls);
}
// Top of the page: status graph, export links and the overview pages table.
protected void writeOverviewSection(PrintWriter pw, List<TestReportHtml> htmls) {
writeOverviewGraph(pw, htmls);
writeLinks(pw, htmls);
List<TestReportHtml> overviewPages = filterBy(htmls, TestReportHtml::isOverviewPage);
writeSection(pw, "Overview Pages", overviewPages);
}
// Writes a simple colored status bar (one cell per status, width proportional
// to its share of tests) plus the pie chart placeholders. The bar acts as a
// fallback: the chart generator script removes it once the charts render.
protected void writeOverviewGraph(PrintWriter pw, List<TestReportHtml> htmls) {
pw.write("<div style='width:100%;'>");
// overview pages and pages without assertions are excluded from the bar
List<TestReportHtml> testHtmls = filterBy(htmls,
x -> !x.isOverviewPage()
&& !NO_TEST_STATUS.equals(x.getStatus()));
pw.write("<table style='width:100%;text-align:center;' id='simpleStatusGraph'><tr>");
writeGraphCell(pw, ERROR_STATUS, testHtmls);
writeGraphCell(pw, FAIL_STATUS, testHtmls);
writeGraphCell(pw, IGNORE_STATUS, testHtmls);
writeGraphCell(pw, PASS_STATUS, testHtmls);
pw.write("</tr></table>");
writePieChartsElement(pw, htmls);
pw.write("</div>");
}
// Renders one colored cell of the simple status bar; the cell width is the
// status' percentage share of all given tests. Nothing is written when the
// status did not occur.
protected void writeGraphCell(PrintWriter pw, String status, List<TestReportHtml> testHtmls) {
    int count = filterByStatus(testHtmls, status).size();
    if (count == 0) {
        return;
    }
    int pct = (count * 100) / testHtmls.size();
    pw.write(String.format("<td class=\"%s\" style=\"width:%s%%;\">%s</td>", status, pct, count));
}
// Emits the pie chart containers and the script that fills them; when the
// charts finish rendering, the fallback 'simpleStatusGraph' table is removed.
protected void writePieChartsElement(PrintWriter pw, List<TestReportHtml> htmls) {
ChartWriter pieChartWriter = getChartWriter(pw);
pw.write("<div style='display:flex;flex-wrap:wrap;justify-content:center;'>");
writePieChartElements(pw, htmls);
pieChartWriter.writeChartGenerators(
htmls,
this::writePieChartGeneratorBody,
"document.getElementById('simpleStatusGraph').outerHTML=''");
pw.write("</div>");
}
// Generates the chart scripts: status pie, tests-per-run pie, time-per-run
// pie and the per-test runtime bar chart. Negative (unknown) runtimes are
// counted as 0 in the time-per-run chart.
protected void writePieChartGeneratorBody(ChartWriter writer, List<TestReportHtml> htmls) {
List<TestReportHtml> nonOverviews = filterBy(htmls, r -> !r.isOverviewPage());
writeStatusPieChartGenerator(writer, nonOverviews);
writer.writePieChartGenerator("Tests / Run", TESTCOUNT_CHART_ID, nonOverviews,
r -> r.getRunName(), Collectors.counting());
writer.writePieChartGenerator("Time / Run", RUNTIME_CHART_ID, nonOverviews,
r -> r.getRunName(), Collectors.summingLong(r -> r.getTime() < 0 ? 0 : r.getTime()));
writer.writeBarChartGenerator("ms / Test", TIME_PER_TEST_CHART_ID,
",hAxis:{textPosition:'none'}");
}
// Status pie chart; the slice colors are positional and correspond to the
// fixed status order returned by getStatusMap.
protected void writeStatusPieChartGenerator(ChartWriter writer, List<TestReportHtml> htmls) {
Map<String, Long> displayedStatus = getStatusMap(htmls);
writer.writePieChartGenerator("Status", STATUS_CHART_ID,
",slices:[{color:'#ffffaa'},{color:'#FF6666'},{color:'orange'},{color:'#28B463'},{color:'lightgray'}]",
r -> r.getKey(), r -> r.getValue(), displayedStatus.entrySet());
}
// Emits the empty container divs the pie chart generators render into.
protected void writePieChartElements(PrintWriter pw, List<TestReportHtml> htmls) {
    for (String chartId : new String[] {STATUS_CHART_ID, TESTCOUNT_CHART_ID, RUNTIME_CHART_ID}) {
        pw.write("<div id='");
        pw.write(chartId);
        pw.write("'></div>");
    }
}
// Adds top-right links to the machine readable result exports.
protected void writeLinks(PrintWriter pw, List<TestReportHtml> htmls) {
    pw.write("<div style='position:absolute;right:0;'>"
            + "Test results in: "
            + "<a href='test-results.csv'>CSV</a> <a href='test-results.json'>JSON</a>"
            + "</div>");
}
// Detail tables grouped by status, with the per-test runtime bar chart
// placeholder between the ignored and passed sections.
protected void writeTestResultsSection(PrintWriter pw, List<TestReportHtml> htmls) {
List<TestReportHtml> testHtmls = filterBy(htmls, x -> !x.isOverviewPage());
pw.write("<div id='TestResults' style='width:100%;'>");
List<TestReportHtml> erroredTests = filterByStatus(testHtmls, ERROR_STATUS);
writeSection(pw, "Errored Tests", erroredTests);
List<TestReportHtml> failedTests = filterByStatus(testHtmls, FAIL_STATUS);
writeSection(pw, "Failed Tests", failedTests);
List<TestReportHtml> ignoredTests = filterByStatus(testHtmls, IGNORE_STATUS);
writeSection(pw, "Ignored Tests", ignoredTests);
pw.write("<div id='");
pw.write(TIME_PER_TEST_CHART_ID);
pw.write("' style='height: 300px;'></div>");
List<TestReportHtml> passedTests = filterByStatus(testHtmls, PASS_STATUS);
writeSection(pw, "Passed Tests", passedTests);
List<TestReportHtml> noTests = filterByStatus(testHtmls, NO_TEST_STATUS);
writeSection(pw, "Pages Without Assertions", noTests);
pw.write("</div>");
}
// Writes one titled section with a copy-to-clipboard button and the result
// table; nothing is written when there are no results for the section.
protected void writeSection(PrintWriter pw, String header, List<TestReportHtml> htmls) {
if (!htmls.isEmpty()) {
// section id is the header without whitespace; the table id appends 'Table'
String id = header.replaceAll("\\s", "");
pw.write("<div id=\"");
pw.write(id);
pw.write("\">");
pw.write("<h2>");
pw.write(header);
pw.write("</h2>");
String tableId = id + "Table";
pw.write("<input type='button' value='to clipboard' ");
pw.write("onclick=\"elementContentsToClipboard(document.getElementById('");
pw.write(tableId);
pw.write("'));\">");
writeTestsTable(pw, tableId, htmls);
pw.write("</div>");
}
}
/**
 * Writes a table listing the given tests, slowest first.
 * Sorts a local copy so the caller's list is not reordered as a side effect
 * (the original sorted the argument in place).
 *
 * @param pw writer for the report page
 * @param id HTML id of the generated table
 * @param htmls reports to list; not modified
 */
protected void writeTestsTable(PrintWriter pw, String id, List<TestReportHtml> htmls) {
    pw.write("<table id='");
    pw.write(id);
    pw.write("'><tr><th>Run</th><th>Name</th><th>Runtime (in milliseconds)</th></tr>");
    // slowest times at top
    List<TestReportHtml> slowestFirst = htmls.stream()
            .sorted((o1, o2) -> Long.compare(o2.getTime(), o1.getTime()))
            .collect(Collectors.toList());
    for (TestReportHtml test : slowestFirst) {
        writeTestRow(pw, test);
    }
    pw.write("</table>");
}
// Writes one result row: run name, link to the test page, and its runtime
// (negative times are reported as "unknown").
protected void writeTestRow(PrintWriter pw, TestReportHtml html) {
    long time = html.getTime();
    String timeCell = time < 0 ? "unknown" : nf.format(time);
    StringBuilder row = new StringBuilder();
    row.append("<tr class=\"").append(html.getStatus()).append("\">")
            .append("<td>").append(html.getRunName()).append("</td>")
            .append("<td><a href=\"").append(html.getRelativePath()).append("\">")
            .append(html.getTestName())
            .append("</a></td><td>").append(timeCell).append("</td></tr>");
    pw.write(row.toString());
}
// Counts reports per status, returned in the fixed display order; statuses
// that did not occur are included with a zero count.
protected Map<String, Long> getStatusMap(List<TestReportHtml> htmls) {
    Map<String, Long> counts = htmls.stream()
            .collect(Collectors.groupingBy(TestReportHtml::getStatus, Collectors.counting()));
    Map<String, Long> ordered = new LinkedHashMap<>();
    for (String status : new String[] {
            ERROR_STATUS, FAIL_STATUS, IGNORE_STATUS, PASS_STATUS, NO_TEST_STATUS}) {
        addStatusEntry(status, counts, ordered);
    }
    return ordered;
}
// Copies one status count into the display map, defaulting to zero when absent.
protected void addStatusEntry(String status, Map<String, Long> statuses, Map<String, Long> displayedStatus) {
    Long count = statuses.get(status);
    displayedStatus.put(status, count != null ? count : 0L);
}
// Factory hook so subclasses can supply a different chart writer.
protected ChartWriter getChartWriter(PrintWriter pw) {
return new ChartWriter(pw);
}
// Convenience filter: all reports whose status equals the given status.
protected static List<TestReportHtml> filterByStatus(List<TestReportHtml> htmls, String desiredStatus) {
    Predicate<TestReportHtml> hasStatus = x -> desiredStatus.equals(x.getStatus());
    return filterBy(htmls, hasStatus);
}
// Generic helper: returns a new list containing only the matching elements.
protected static <T> List<T> filterBy(List<T> list, Predicate<T> predicate) {
    List<T> matches = list.stream()
            .filter(predicate)
            .collect(Collectors.toList());
    return matches;
}
}
| src/main/java/nl/hsac/fitnesse/junit/reportmerge/writer/HtmlOverviewFileWriter.java | package nl.hsac.fitnesse.junit.reportmerge.writer;
import nl.hsac.fitnesse.junit.reportmerge.TestReportHtml;
import java.io.File;
import java.io.PrintWriter;
import java.text.NumberFormat;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.ERROR_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.FAIL_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.IGNORE_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.NO_TEST_STATUS;
import static nl.hsac.fitnesse.junit.reportmerge.TestReportHtml.PASS_STATUS;
/**
 * Writes an {@code index.html} overview page for a set of merged test reports:
 * a simple status bar, pie/bar charts, and per-status tables linking to the
 * individual test result pages. Chart rendering is delegated to
 * {@link ChartWriter}; tables get a "to clipboard" button backed by an inline
 * JavaScript helper.
 */
public class HtmlOverviewFileWriter extends OverviewFileWriter {
    // ids of the <div> placeholders the generated chart scripts render into
    private static final String TESTCOUNT_CHART_ID = "countPie";
    private static final String RUNTIME_CHART_ID = "runtimePie";
    private static final String STATUS_CHART_ID = "statusPie";
    private static final String TIME_PER_TEST_CHART_ID = "timePerTestChart";
    // Inline JS: copies the contents of an element to the clipboard, restoring
    // any prior selection afterwards; has a fallback path for browsers that
    // only support body.createTextRange.
    private static final String COPY_TO_CLIPBOARD_JS =
            "function elementContentsToClipboard(el) {" +
            "var selected = document.getSelection().rangeCount > 0? document.getSelection().getRangeAt(0):false;" +
            "var body = document.body, range, sel;" +
            "if (document.createRange && window.getSelection) {" +
            "range = document.createRange();" +
            "sel = window.getSelection();" +
            "sel.removeAllRanges();" +
            "try {" +
            "range.selectNodeContents(el);" +
            "sel.addRange(range);" +
            "} catch (e) {" +
            "range.selectNode(el);" +
            "sel.addRange(range);" +
            "}" +
            "document.execCommand('copy');" +
            "} else if (body.createTextRange) {" +
            "range = body.createTextRange();" +
            "range.moveToElementText(el);" +
            "range.select();" +
            "range.execCommand('copy');" +
            "}" +
            "document.getSelection().removeAllRanges();" +
            "if (selected) document.getSelection().addRange(selected);" +
            "}";

    // used to format runtimes in the test tables
    private final NumberFormat nf = NumberFormat.getIntegerInstance();

    /** Creates a writer producing {@code index.html} inside {@code parentDir}. */
    public HtmlOverviewFileWriter(File parentDir) {
        super(parentDir, "index.html");
    }

    @Override
    protected void writeContent(List<TestReportHtml> htmls) {
        writeHeader(pw, htmls);
        writeBody(pw, htmls);
        writeFooter(pw, htmls);
    }

    /**
     * Writes the html/head opening, reusing the fitnesse.css of the first
     * report's directory, and embeds the clipboard helper script.
     */
    protected void writeHeader(PrintWriter pw, List<TestReportHtml> htmls) {
        TestReportHtml firstTestPage = htmls.get(0);
        String firstRunCssDir = firstTestPage.getDirectory();
        pw.write("<html><head><meta http-equiv='Content-Type' content='text/html;charset=UTF-8'/><link rel='stylesheet' type='text/css' href='");
        pw.write(firstRunCssDir);
        pw.write("/css/fitnesse.css'/>");
        pw.write("<script type='text/javascript'>");
        pw.write(COPY_TO_CLIPBOARD_JS);
        pw.write("</script>");
        writeExtraHeaderContent(pw, htmls);
        pw.write("</head><body>");
    }

    /** Hook for extra head content; by default loads the charting library. */
    protected void writeExtraHeaderContent(PrintWriter pw, List<TestReportHtml> htmls) {
        getChartWriter(pw).writeLoadScriptTag();
    }

    protected void writeFooter(PrintWriter pw, List<TestReportHtml> htmls) {
        pw.write("</body></html>");
    }

    protected void writeBody(PrintWriter pw, List<TestReportHtml> htmls) {
        writeOverviewSection(pw, htmls);
        writeTestResultsSection(pw, htmls);
    }

    /** Writes the status graph plus the table of overview pages. */
    protected void writeOverviewSection(PrintWriter pw, List<TestReportHtml> htmls) {
        writeOverviewGraph(pw, htmls);
        List<TestReportHtml> overviewPages = filterBy(htmls, TestReportHtml::isOverviewPage);
        writeSection(pw, "Overview Pages", overviewPages);
    }

    /**
     * Writes a plain table acting as a status bar (one colored cell per
     * status, excluding overview pages and pages without assertions) followed
     * by the pie chart placeholders. The chart script removes the plain table
     * once the charts render (see {@link #writePieChartsElement}).
     */
    protected void writeOverviewGraph(PrintWriter pw, List<TestReportHtml> htmls) {
        pw.write("<div style='width:100%;'>");
        List<TestReportHtml> testHtmls = filterBy(htmls,
                x -> !x.isOverviewPage()
                        && !NO_TEST_STATUS.equals(x.getStatus()));
        pw.write("<table style='width:100%;text-align:center;' id='simpleStatusGraph'><tr>");
        writeGraphCell(pw, ERROR_STATUS, testHtmls);
        writeGraphCell(pw, FAIL_STATUS, testHtmls);
        writeGraphCell(pw, IGNORE_STATUS, testHtmls);
        writeGraphCell(pw, PASS_STATUS, testHtmls);
        pw.write("</tr></table>");
        writePieChartsElement(pw, htmls);
        pw.write("</div>");
    }

    /**
     * Writes one cell whose CSS class is the status and whose width is the
     * (integer) percentage of tests having that status; writes nothing when
     * the status does not occur.
     */
    protected void writeGraphCell(PrintWriter pw, String status, List<TestReportHtml> testHtmls) {
        int totalCount = testHtmls.size();
        int count = filterByStatus(testHtmls, status).size();
        if (count > 0) {
            int pct = (count * 100) / totalCount;
            String cell = String.format("<td class=\"%s\" style=\"width:%s%%;\">%s</td>", status, pct, count);
            pw.write(cell);
        }
    }

    /**
     * Writes the chart container plus the generator scripts; once the charts
     * are drawn the plain status table is hidden by clearing its outerHTML.
     */
    protected void writePieChartsElement(PrintWriter pw, List<TestReportHtml> htmls) {
        ChartWriter pieChartWriter = getChartWriter(pw);
        pw.write("<div style='display:flex;flex-wrap:wrap;justify-content:center;'>");
        writePieChartElements(pw, htmls);
        pieChartWriter.writeChartGenerators(
                htmls,
                this::writePieChartGeneratorBody,
                "document.getElementById('simpleStatusGraph').outerHTML=''");
        pw.write("</div>");
    }

    /**
     * Emits the individual chart generators: status pie, tests-per-run pie,
     * time-per-run pie (negative times counted as 0) and ms-per-test bar chart.
     */
    protected void writePieChartGeneratorBody(ChartWriter writer, List<TestReportHtml> htmls) {
        List<TestReportHtml> nonOverviews = filterBy(htmls, r -> !r.isOverviewPage());
        writeStatusPieChartGenerator(writer, nonOverviews);
        writer.writePieChartGenerator("Tests / Run", TESTCOUNT_CHART_ID, nonOverviews,
                r -> r.getRunName(), Collectors.counting());
        writer.writePieChartGenerator("Time / Run", RUNTIME_CHART_ID, nonOverviews,
                r -> r.getRunName(), Collectors.summingLong(r -> r.getTime() < 0 ? 0 : r.getTime()));
        writer.writeBarChartGenerator("ms / Test", TIME_PER_TEST_CHART_ID,
                ",hAxis:{textPosition:'none'}");
    }

    /**
     * Status pie chart; slice colors are listed in the fixed status order
     * produced by {@link #getStatusMap(List)} (error, fail, ignore, pass, no-test).
     */
    protected void writeStatusPieChartGenerator(ChartWriter writer, List<TestReportHtml> htmls) {
        Map<String, Long> displayedStatus = getStatusMap(htmls);
        writer.writePieChartGenerator("Status", STATUS_CHART_ID,
                ",slices:[{color:'#ffffaa'},{color:'#FF6666'},{color:'orange'},{color:'#28B463'},{color:'lightgray'}]",
                r -> r.getKey(), r -> r.getValue(), displayedStatus.entrySet());
    }

    /** Writes the empty placeholder divs the pie chart scripts render into. */
    protected void writePieChartElements(PrintWriter pw, List<TestReportHtml> htmls) {
        pw.write("<div id='");
        pw.write(STATUS_CHART_ID);
        pw.write("'></div>");
        pw.write("<div id='");
        pw.write(TESTCOUNT_CHART_ID);
        pw.write("'></div>");
        pw.write("<div id='");
        pw.write(RUNTIME_CHART_ID);
        pw.write("'></div>");
    }

    /**
     * Writes per-status sections for all non-overview pages, with the
     * time-per-test chart placeholder placed between the ignored and passed
     * sections.
     */
    protected void writeTestResultsSection(PrintWriter pw, List<TestReportHtml> htmls) {
        List<TestReportHtml> testHtmls = filterBy(htmls, x -> !x.isOverviewPage());
        pw.write("<div id='TestResults' style='width:100%;'>");
        List<TestReportHtml> erroredTests = filterByStatus(testHtmls, ERROR_STATUS);
        writeSection(pw, "Errored Tests", erroredTests);
        List<TestReportHtml> failedTests = filterByStatus(testHtmls, FAIL_STATUS);
        writeSection(pw, "Failed Tests", failedTests);
        List<TestReportHtml> ignoredTests = filterByStatus(testHtmls, IGNORE_STATUS);
        writeSection(pw, "Ignored Tests", ignoredTests);
        pw.write("<div id='");
        pw.write(TIME_PER_TEST_CHART_ID);
        pw.write("' style='height: 300px;'></div>");
        List<TestReportHtml> passedTests = filterByStatus(testHtmls, PASS_STATUS);
        writeSection(pw, "Passed Tests", passedTests);
        List<TestReportHtml> noTests = filterByStatus(testHtmls, NO_TEST_STATUS);
        writeSection(pw, "Pages Without Assertions", noTests);
        pw.write("</div>");
    }

    /**
     * Writes a titled section containing a test table and a button copying the
     * table to the clipboard. Skipped when the list is empty. The section id is
     * the header with whitespace removed; the table id appends "Table".
     */
    protected void writeSection(PrintWriter pw, String header, List<TestReportHtml> htmls) {
        if (!htmls.isEmpty()) {
            String id = header.replaceAll("\\s", "");
            pw.write("<div id=\"");
            pw.write(id);
            pw.write("\">");
            pw.write("<h2>");
            pw.write(header);
            pw.write("</h2>");
            String tableId = id + "Table";
            pw.write("<input type='button' value='to clipboard' ");
            pw.write("onclick=\"elementContentsToClipboard(document.getElementById('");
            pw.write(tableId);
            pw.write("'));\">");
            writeTestsTable(pw, tableId, htmls);
            pw.write("</div>");
        }
    }

    /**
     * Writes a table with one row per report. NOTE(review): sorts the supplied
     * list in place (slowest first) — callers pass freshly filtered lists so
     * this is currently harmless.
     */
    protected void writeTestsTable(PrintWriter pw, String id, List<TestReportHtml> htmls) {
        pw.write("<table id='");
        pw.write(id);
        pw.write("'><tr><th>Run</th><th>Name</th><th>Runtime (in milliseconds)</th></tr>");
        // slowest times at top
        htmls.sort((o1, o2) -> Long.compare(o2.getTime(), o1.getTime()));
        for (TestReportHtml test : htmls) {
            writeTestRow(pw, test);
        }
        pw.write("</table>");
    }

    /**
     * Writes one row: run name, a link to the report page labeled with the
     * test name, and the runtime ("unknown" when negative).
     */
    protected void writeTestRow(PrintWriter pw, TestReportHtml html) {
        String testPageName = html.getRelativePath();
        String status = html.getStatus();
        String run = html.getRunName();
        String testName = html.getTestName();
        long time = html.getTime();
        pw.write("<tr class=\"");
        pw.write(status);
        pw.write("\">");
        pw.write("<td>");
        pw.write(run);
        pw.write("</td>");
        pw.write("<td><a href=\"");
        pw.write(testPageName);
        pw.write("\">");
        pw.write(testName);
        pw.write("</a></td><td>");
        pw.write(time < 0 ? "unknown" : nf.format(time));
        pw.write("</td></tr>");
    }

    /**
     * Counts reports per status and returns a map with a fixed entry order
     * (error, fail, ignore, pass, no-test); statuses that do not occur get 0.
     */
    protected Map<String, Long> getStatusMap(List<TestReportHtml> htmls) {
        Map<String, Long> statuses = htmls.stream()
                .collect(Collectors.groupingBy(TestReportHtml::getStatus, Collectors.counting()));
        Map<String, Long> displayedStatus = new LinkedHashMap<>();
        addStatusEntry(ERROR_STATUS, statuses, displayedStatus);
        addStatusEntry(FAIL_STATUS, statuses, displayedStatus);
        addStatusEntry(IGNORE_STATUS, statuses, displayedStatus);
        addStatusEntry(PASS_STATUS, statuses, displayedStatus);
        addStatusEntry(NO_TEST_STATUS, statuses, displayedStatus);
        return displayedStatus;
    }

    /** Puts the count for {@code status} into {@code displayedStatus}, defaulting to 0. */
    protected void addStatusEntry(String status, Map<String, Long> statuses, Map<String, Long> displayedStatus) {
        displayedStatus.put(status, statuses.getOrDefault(status, 0L));
    }

    /** Factory hook so subclasses can substitute another {@link ChartWriter}. */
    protected ChartWriter getChartWriter(PrintWriter pw) {
        return new ChartWriter(pw);
    }

    /** Returns the reports having exactly the desired status. */
    protected static List<TestReportHtml> filterByStatus(List<TestReportHtml> htmls, String desiredStatus) {
        return filterBy(htmls, x -> desiredStatus.equals(x.getStatus()));
    }

    /** Returns a new list with the elements of {@code list} matching {@code predicate}. */
    protected static <T> List<T> filterBy(List<T> list, Predicate<T> predicate) {
        return list.stream().filter(predicate).collect(Collectors.toList());
    }
}
| Add links to test result files to generated overview page
| src/main/java/nl/hsac/fitnesse/junit/reportmerge/writer/HtmlOverviewFileWriter.java | Add links to test result files to generated overview page |
|
Java | apache-2.0 | 0288adfec8bced3ef54302a3078fcf9fa93c63cd | 0 | PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr | package org.apache.solr.handler;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.SolrException;
import org.apache.solr.handler.extraction.ExtractingParams;
import org.apache.solr.handler.extraction.ExtractingRequestHandler;
import org.apache.solr.handler.extraction.ExtractingDocumentLoader;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import java.util.List;
import java.util.ArrayList;
import java.io.File;
/**
 * Tests for the {@code /update/extract} handler ({@link ExtractingRequestHandler}):
 * field mapping ({@code fmap.*}), literals, capture/captureAttr, uprefix,
 * default field, extract-only mode, XPath filtering and non-Latin (Arabic) PDF
 * extraction. Documents are loaded via {@link #loadLocal(String, String...)}.
 */
public class ExtractingRequestHandlerTest extends SolrTestCaseJ4 {
    @BeforeClass
    public static void beforeClass() throws Exception {
        initCore("solrconfig.xml", "schema.xml");
    }

    // Each test starts from an empty, committed index.
    @Before
    public void setUp() throws Exception {
        super.setUp();
        clearIndex();
        assertU(commit());
    }

    /**
     * End-to-end extraction of several formats (PDF, HTML, XML) exercising
     * fmap renaming, lowernames, uprefix, captureAttr, capture and field boost.
     */
    @Test
    public void testExtraction() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);
        loadLocal("solr-word.pdf", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "fmap.content", "extractedContent",
                "literal.id", "one",
                "fmap.Last-Modified", "extractedDate"
        );
        // not searchable until commit
        assertQ(req("title:solr-word"), "//*[@numFound='0']");
        assertU(commit());
        assertQ(req("title:solr-word"), "//*[@numFound='1']");

        loadLocal("simple.html", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "fmap.language", "extractedLanguage",
                "literal.id", "two",
                "fmap.content", "extractedContent",
                "fmap.Last-Modified", "extractedDate"
        );
        assertQ(req("title:Welcome"), "//*[@numFound='0']");
        assertU(commit());
        assertQ(req("title:Welcome"), "//*[@numFound='1']");

        loadLocal("simple.html",
                "literal.id","simple2",
                "uprefix", "t_",
                "lowernames", "true",
                "captureAttr", "true",
                "fmap.a","t_href",
                "fmap.content_type", "abcxyz", // test that lowernames is applied before mapping, and uprefix is applied after mapping
                "commit", "true" // test immediate commit
        );
        // test that purposely causes a failure to print out the doc for test debugging
        // assertQ(req("q","id:simple2","indent","true"), "//*[@numFound='0']");
        // test both lowernames and unknown field mapping
        //assertQ(req("+id:simple2 +t_content_type:[* TO *]"), "//*[@numFound='1']");
        assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']");
        assertQ(req("+id:simple2 +t_abcxyz:[* TO *]"), "//*[@numFound='1']");

        // load again in the exact same way, but boost one field
        loadLocal("simple.html",
                "literal.id","simple3",
                "uprefix", "t_",
                "lowernames", "true",
                "captureAttr", "true", "fmap.a","t_href",
                "commit", "true"
                ,"boost.t_href", "100.0"
        );
        // boosted doc should rank first
        assertQ(req("t_href:http"), "//*[@numFound='2']");
        assertQ(req("t_href:http"), "//doc[1]/str[.='simple3']");
        assertQ(req("+id:simple3 +t_content_type:[* TO *]"), "//*[@numFound='1']");//test lowercase and then uprefix

        // test capture
        loadLocal("simple.html",
                "literal.id","simple4",
                "uprefix", "t_",
                "capture","p", // capture only what is in the title element
                "commit", "true"
        );
        assertQ(req("+id:simple4 +t_content:Solr"), "//*[@numFound='1']");
        assertQ(req("+id:simple4 +t_p:\"here is some text\""), "//*[@numFound='1']");

        loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "literal.id", "three",
                "fmap.content", "extractedContent",
                "fmap.language", "extractedLanguage",
                "fmap.Last-Modified", "extractedDate"
        );
        assertQ(req("stream_name:version_control.xml"), "//*[@numFound='0']");
        assertU(commit());
        assertQ(req("stream_name:version_control.xml"), "//*[@numFound='1']");
    }

    /**
     * Verifies defaultField handling: without uprefix/defaultField unknown
     * fields fail; with defaultField they are collected there; with both,
     * explicit mappings and uprefix still take precedence.
     */
    @Test
    public void testDefaultField() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);
        try {
            ignoreException("unknown field 'a'");
            // no mapping for the <a> field and no default -> must fail
            loadLocal("simple.html",
                    "literal.id","simple2",
                    "lowernames", "true",
                    "captureAttr", "true",
                    //"fmap.content_type", "abcxyz",
                    "commit", "true" // test immediate commit
            );
            assertTrue(false);
        } catch (SolrException e) {
            //do nothing
        } finally {
            resetExceptionIgnores();
        }

        loadLocal("simple.html",
                "literal.id","simple2",
                ExtractingParams.DEFAULT_FIELD, "defaultExtr",//test that unmapped fields go to the text field when no uprefix is specified
                "lowernames", "true",
                "captureAttr", "true",
                //"fmap.content_type", "abcxyz",
                "commit", "true" // test immediate commit
        );
        assertQ(req("id:simple2"), "//*[@numFound='1']");
        assertQ(req("defaultExtr:http\\://www.apache.org"), "//*[@numFound='1']");

        //Test when both uprefix and default are specified.
        loadLocal("simple.html",
                "literal.id","simple2",
                ExtractingParams.DEFAULT_FIELD, "defaultExtr",//test that unmapped fields go to the text field when no uprefix is specified
                ExtractingParams.UNKNOWN_FIELD_PREFIX, "t_",
                "lowernames", "true",
                "captureAttr", "true",
                "fmap.a","t_href",
                //"fmap.content_type", "abcxyz",
                "commit", "true" // test immediate commit
        );
        assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']");
    }

    /**
     * Verifies literal.* parameters: multi-valued literals on a multiValued
     * field, repeated literals on a single-valued field, and a single literal.
     */
    @Test
    public void testLiterals() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);
        //test literal
        loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "fmap.content", "extractedContent",
                "literal.id", "one",
                "fmap.language", "extractedLanguage",
                "literal.extractionLiteralMV", "one",
                "literal.extractionLiteralMV", "two",
                "fmap.Last-Modified", "extractedDate"
        );
        assertQ(req("stream_name:version_control.xml"), "//*[@numFound='0']");
        assertU(commit());
        assertQ(req("stream_name:version_control.xml"), "//*[@numFound='1']");
        assertQ(req("extractionLiteralMV:one"), "//*[@numFound='1']");
        assertQ(req("extractionLiteralMV:two"), "//*[@numFound='1']");

        try {
            // two values for the single-valued extractionLiteral field
            loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                    "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                    "fmap.Author", "extractedAuthor",
                    "fmap.content", "extractedContent",
                    "literal.id", "two",
                    "fmap.language", "extractedLanguage",
                    "literal.extractionLiteral", "one",
                    "literal.extractionLiteral", "two",
                    "fmap.Last-Modified", "extractedDate"
            );
            // TODO: original author did not specify why an exception should be thrown... how to fix?
            // assertTrue("Exception should have been thrown", false);
        } catch (SolrException e) {
            //nothing to see here, move along
        }

        loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "fmap.content", "extractedContent",
                "literal.id", "three",
                "fmap.language", "extractedLanguage",
                "literal.extractionLiteral", "one",
                "fmap.Last-Modified", "extractedDate"
        );
        assertU(commit());
        assertQ(req("extractionLiteral:one"), "//*[@numFound='1']");
    }

    /** Loads a plain text file with an explicit MIME type (stream.type). */
    @Test
    public void testPlainTextSpecifyingMimeType() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);

        // Load plain text specifying MIME type:
        loadLocal("version_control.txt", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "literal.id", "one",
                "fmap.language", "extractedLanguage",
                "fmap.content", "extractedContent",
                ExtractingParams.STREAM_TYPE, "text/plain"
        );
        assertQ(req("extractedContent:Apache"), "//*[@numFound='0']");
        assertU(commit());
        assertQ(req("extractedContent:Apache"), "//*[@numFound='1']");
    }

    /** Loads a plain text file identified by resource name instead of MIME type. */
    @Test
    public void testPlainTextSpecifyingResourceName() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);

        // Load plain text specifying filename
        loadLocal("version_control.txt", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "literal.id", "one",
                "fmap.language", "extractedLanguage",
                "fmap.content", "extractedContent",
                ExtractingParams.RESOURCE_NAME, "version_control.txt"
        );
        assertQ(req("extractedContent:Apache"), "//*[@numFound='0']");
        assertU(commit());
        assertQ(req("extractedContent:Apache"), "//*[@numFound='1']");
    }

    // Note: If you load a plain text file specifying neither MIME type nor filename, extraction will silently fail. This is because Tika's
    // automatic MIME type detection will fail, and it will default to using an empty-string-returning default parser

    /**
     * extractOnly=true returns extracted content (XML by default, plain text
     * with extractFormat=text) plus a metadata NamedList instead of indexing.
     */
    @Test
    public void testExtractOnly() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);
        SolrQueryResponse rsp = loadLocal("solr-word.pdf", ExtractingParams.EXTRACT_ONLY, "true");
        assertTrue("rsp is null and it shouldn't be", rsp != null);
        NamedList list = rsp.getValues();

        String extraction = (String) list.get("solr-word.pdf");
        assertTrue("extraction is null and it shouldn't be", extraction != null);
        assertTrue(extraction + " does not contain " + "solr-word", extraction.indexOf("solr-word") != -1);

        NamedList nl = (NamedList) list.get("solr-word.pdf_metadata");
        assertTrue("nl is null and it shouldn't be", nl != null);
        Object title = nl.get("title");
        assertTrue("title is null and it shouldn't be", title != null);
        // default format is XHTML
        assertTrue(extraction.indexOf("<?xml") != -1);

        rsp = loadLocal("solr-word.pdf", ExtractingParams.EXTRACT_ONLY, "true",
                ExtractingParams.EXTRACT_FORMAT, ExtractingDocumentLoader.TEXT_FORMAT);
        assertTrue("rsp is null and it shouldn't be", rsp != null);
        list = rsp.getValues();

        extraction = (String) list.get("solr-word.pdf");
        assertTrue("extraction is null and it shouldn't be", extraction != null);
        assertTrue(extraction + " does not contain " + "solr-word", extraction.indexOf("solr-word") != -1);
        // text format must not contain XML markup
        assertTrue(extraction.indexOf("<?xml") == -1);

        nl = (NamedList) list.get("solr-word.pdf_metadata");
        assertTrue("nl is null and it shouldn't be", nl != null);
        title = nl.get("title");
        assertTrue("title is null and it shouldn't be", title != null);
    }

    /** Restricts extract-only output with an XPath expression over the XHTML. */
    @Test
    public void testXPath() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);
        SolrQueryResponse rsp = loadLocal("example.html",
                ExtractingParams.XPATH_EXPRESSION, "/xhtml:html/xhtml:body/xhtml:a/descendant:node()",
                ExtractingParams.EXTRACT_ONLY, "true"
        );
        assertTrue("rsp is null and it shouldn't be", rsp != null);
        NamedList list = rsp.getValues();
        String val = (String) list.get("example.html");
        val = val.trim();
        assertTrue(val + " is not equal to " + "linkNews", val.equals("linkNews") == true);//there are two <a> tags, and they get collapesd
    }

    /** test arabic PDF extraction is functional */
    @Test
    public void testArabicPDF() throws Exception {
        ExtractingRequestHandler handler = (ExtractingRequestHandler)
                h.getCore().getRequestHandler("/update/extract");
        assertTrue("handler is null and it shouldn't be", handler != null);

        loadLocal("arabic.pdf", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "fmap.content", "wdf_nocase",
                "literal.id", "one",
                "fmap.Last-Modified", "extractedDate");
        assertQ(req("wdf_nocase:السلم"), "//result[@numFound=0]");
        assertU(commit());
        assertQ(req("wdf_nocase:السلم"), "//result[@numFound=1]");
    }

    /**
     * Sends the named file to {@code /update/extract} as a content stream with
     * the given request parameters and returns the handler response.
     */
    SolrQueryResponse loadLocal(String filename, String... args) throws Exception {
        LocalSolrQueryRequest req = (LocalSolrQueryRequest) req(args);
        try {
            // TODO: stop using locally defined streams once stream.file and
            // stream.body work everywhere
            List<ContentStream> cs = new ArrayList<ContentStream>();
            cs.add(new ContentStreamBase.FileStream(new File(filename)));
            req.setContentStreams(cs);
            return h.queryAndResponse("/update/extract", req);
        } finally {
            req.close();
        }
    }
}
| solr/contrib/extraction/src/test/java/org/apache/solr/handler/ExtractingRequestHandlerTest.java | package org.apache.solr.handler;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.SolrException;
import org.apache.solr.handler.extraction.ExtractingParams;
import org.apache.solr.handler.extraction.ExtractingRequestHandler;
import org.apache.solr.handler.extraction.ExtractingDocumentLoader;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import java.util.List;
import java.util.ArrayList;
import java.io.File;
/**
*
*
**/
public class ExtractingRequestHandlerTest extends SolrTestCaseJ4 {
// One-time core initialization for all tests in this class.
@BeforeClass
public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
}
// Each test starts from an empty, committed index.
@Before
public void setUp() throws Exception {
    super.setUp();
    clearIndex();
    assertU(commit());
}
/**
 * End-to-end extraction of several formats (PDF, HTML, XML) exercising
 * fmap renaming, lowernames, uprefix, captureAttr, capture and field boost.
 */
@Test
public void testExtraction() throws Exception {
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);
    loadLocal("solr-word.pdf", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
            "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
            "fmap.Author", "extractedAuthor",
            "fmap.content", "extractedContent",
            "literal.id", "one",
            "fmap.Last-Modified", "extractedDate"
    );
    // not searchable until commit
    assertQ(req("title:solr-word"), "//*[@numFound='0']");
    assertU(commit());
    assertQ(req("title:solr-word"), "//*[@numFound='1']");

    loadLocal("simple.html", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
            "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
            "fmap.Author", "extractedAuthor",
            "fmap.language", "extractedLanguage",
            "literal.id", "two",
            "fmap.content", "extractedContent",
            "fmap.Last-Modified", "extractedDate"
    );
    assertQ(req("title:Welcome"), "//*[@numFound='0']");
    assertU(commit());
    assertQ(req("title:Welcome"), "//*[@numFound='1']");

    loadLocal("simple.html",
            "literal.id","simple2",
            "uprefix", "t_",
            "lowernames", "true",
            "captureAttr", "true",
            "fmap.a","t_href",
            "fmap.content_type", "abcxyz", // test that lowernames is applied before mapping, and uprefix is applied after mapping
            "commit", "true" // test immediate commit
    );
    // test that purposely causes a failure to print out the doc for test debugging
    // assertQ(req("q","id:simple2","indent","true"), "//*[@numFound='0']");
    // test both lowernames and unknown field mapping
    //assertQ(req("+id:simple2 +t_content_type:[* TO *]"), "//*[@numFound='1']");
    assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']");
    assertQ(req("+id:simple2 +t_abcxyz:[* TO *]"), "//*[@numFound='1']");

    // load again in the exact same way, but boost one field
    loadLocal("simple.html",
            "literal.id","simple3",
            "uprefix", "t_",
            "lowernames", "true",
            "captureAttr", "true", "fmap.a","t_href",
            "commit", "true"
            ,"boost.t_href", "100.0"
    );
    // boosted doc should rank first
    assertQ(req("t_href:http"), "//*[@numFound='2']");
    assertQ(req("t_href:http"), "//doc[1]/str[.='simple3']");
    assertQ(req("+id:simple3 +t_content_type:[* TO *]"), "//*[@numFound='1']");//test lowercase and then uprefix

    // test capture
    loadLocal("simple.html",
            "literal.id","simple4",
            "uprefix", "t_",
            "capture","p", // capture only what is in the title element
            "commit", "true"
    );
    assertQ(req("+id:simple4 +t_content:Solr"), "//*[@numFound='1']");
    assertQ(req("+id:simple4 +t_p:\"here is some text\""), "//*[@numFound='1']");

    loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
            "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
            "fmap.Author", "extractedAuthor",
            "literal.id", "three",
            "fmap.content", "extractedContent",
            "fmap.language", "extractedLanguage",
            "fmap.Last-Modified", "extractedDate"
    );
    assertQ(req("stream_name:version_control.xml"), "//*[@numFound='0']");
    assertU(commit());
    assertQ(req("stream_name:version_control.xml"), "//*[@numFound='1']");
}
/**
 * Verifies defaultField handling: without uprefix/defaultField unknown
 * fields fail; with defaultField they are collected there; with both,
 * explicit mappings and uprefix still take precedence.
 */
@Test
public void testDefaultField() throws Exception {
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);
    try {
        ignoreException("unknown field 'a'");
        // no mapping for the <a> field and no default -> must fail
        loadLocal("simple.html",
                "literal.id","simple2",
                "lowernames", "true",
                "captureAttr", "true",
                //"fmap.content_type", "abcxyz",
                "commit", "true" // test immediate commit
        );
        assertTrue(false);
    } catch (SolrException e) {
        //do nothing
    } finally {
        resetExceptionIgnores();
    }

    loadLocal("simple.html",
            "literal.id","simple2",
            ExtractingParams.DEFAULT_FIELD, "defaultExtr",//test that unmapped fields go to the text field when no uprefix is specified
            "lowernames", "true",
            "captureAttr", "true",
            //"fmap.content_type", "abcxyz",
            "commit", "true" // test immediate commit
    );
    assertQ(req("id:simple2"), "//*[@numFound='1']");
    assertQ(req("defaultExtr:http\\://www.apache.org"), "//*[@numFound='1']");

    //Test when both uprefix and default are specified.
    loadLocal("simple.html",
            "literal.id","simple2",
            ExtractingParams.DEFAULT_FIELD, "defaultExtr",//test that unmapped fields go to the text field when no uprefix is specified
            ExtractingParams.UNKNOWN_FIELD_PREFIX, "t_",
            "lowernames", "true",
            "captureAttr", "true",
            "fmap.a","t_href",
            //"fmap.content_type", "abcxyz",
            "commit", "true" // test immediate commit
    );
    assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']");
}
/**
 * Verifies literal.* parameters: multi-valued literals on a multiValued
 * field, repeated literals on a single-valued field, and a single literal.
 */
@Test
public void testLiterals() throws Exception {
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);
    //test literal
    loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
            "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
            "fmap.Author", "extractedAuthor",
            "fmap.content", "extractedContent",
            "literal.id", "one",
            "fmap.language", "extractedLanguage",
            "literal.extractionLiteralMV", "one",
            "literal.extractionLiteralMV", "two",
            "fmap.Last-Modified", "extractedDate"
    );
    assertQ(req("stream_name:version_control.xml"), "//*[@numFound='0']");
    assertU(commit());
    assertQ(req("stream_name:version_control.xml"), "//*[@numFound='1']");
    assertQ(req("extractionLiteralMV:one"), "//*[@numFound='1']");
    assertQ(req("extractionLiteralMV:two"), "//*[@numFound='1']");

    try {
        // two values for the single-valued extractionLiteral field
        loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
                "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
                "fmap.Author", "extractedAuthor",
                "fmap.content", "extractedContent",
                "literal.id", "two",
                "fmap.language", "extractedLanguage",
                "literal.extractionLiteral", "one",
                "literal.extractionLiteral", "two",
                "fmap.Last-Modified", "extractedDate"
        );
        // TODO: original author did not specify why an exception should be thrown... how to fix?
        // assertTrue("Exception should have been thrown", false);
    } catch (SolrException e) {
        //nothing to see here, move along
    }

    loadLocal("version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
            "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
            "fmap.Author", "extractedAuthor",
            "fmap.content", "extractedContent",
            "literal.id", "three",
            "fmap.language", "extractedLanguage",
            "literal.extractionLiteral", "one",
            "fmap.Last-Modified", "extractedDate"
    );
    assertU(commit());
    assertQ(req("extractionLiteral:one"), "//*[@numFound='1']");
}
/** Loads a plain text file with an explicit MIME type (stream.type). */
@Test
public void testPlainTextSpecifyingMimeType() throws Exception {
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);

    // Load plain text specifying MIME type:
    loadLocal("version_control.txt", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
            "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
            "fmap.Author", "extractedAuthor",
            "literal.id", "one",
            "fmap.language", "extractedLanguage",
            "fmap.content", "extractedContent",
            ExtractingParams.STREAM_TYPE, "text/plain"
    );
    assertQ(req("extractedContent:Apache"), "//*[@numFound='0']");
    assertU(commit());
    assertQ(req("extractedContent:Apache"), "//*[@numFound='1']");
}
@Test
public void testPlainTextSpecifyingResourceName() throws Exception {
    // The extracting handler must be registered under /update/extract in the test config.
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);

    // Load plain text specifying filename
    // (RESOURCE_NAME lets Tika detect the type from the file extension instead of
    // an explicit MIME type; the fmap.* arguments are source/target field pairs).
    loadLocal("version_control.txt", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
        "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
        "fmap.Author", "extractedAuthor",
        "literal.id", "one",
        "fmap.language", "extractedLanguage",
        "fmap.content", "extractedContent",
        ExtractingParams.RESOURCE_NAME, "version_control.txt"
    );
    // Invisible before commit, searchable after it.
    assertQ(req("extractedContent:Apache"), "//*[@numFound='0']");
    assertU(commit());
    assertQ(req("extractedContent:Apache"), "//*[@numFound='1']");
}
// Note: If you load a plain text file specifying neither MIME type nor filename, extraction will silently fail. This is because Tika's
// automatic MIME type detection will fail, and it will default to using an empty-string-returning default parser
@Test
public void testExtractOnly() throws Exception {
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);

    // With EXTRACT_ONLY=true nothing is indexed; the extracted content and the
    // document metadata are returned directly in the response instead.
    SolrQueryResponse rsp = loadLocal("solr-word.pdf", ExtractingParams.EXTRACT_ONLY, "true");
    assertTrue("rsp is null and it shouldn't be", rsp != null);

    // The extraction is keyed by the resource name; the default format is XHTML,
    // hence the "<?xml" prolog check below.
    NamedList list = rsp.getValues();
    String extraction = (String) list.get("solr-word.pdf");
    assertTrue("extraction is null and it shouldn't be", extraction != null);
    assertTrue(extraction + " does not contain " + "solr-word", extraction.indexOf("solr-word") != -1);

    // Metadata is returned under "<resource name>_metadata".
    NamedList nl = (NamedList) list.get("solr-word.pdf_metadata");
    assertTrue("nl is null and it shouldn't be", nl != null);
    Object title = nl.get("title");
    assertTrue("title is null and it shouldn't be", title != null);
    assertTrue(extraction.indexOf("<?xml") != -1);

    // Requesting TEXT_FORMAT must yield plain text without any XML prolog.
    rsp = loadLocal("solr-word.pdf", ExtractingParams.EXTRACT_ONLY, "true",
        ExtractingParams.EXTRACT_FORMAT, ExtractingDocumentLoader.TEXT_FORMAT);
    assertTrue("rsp is null and it shouldn't be", rsp != null);
    list = rsp.getValues();
    extraction = (String) list.get("solr-word.pdf");
    assertTrue("extraction is null and it shouldn't be", extraction != null);
    assertTrue(extraction + " does not contain " + "solr-word", extraction.indexOf("solr-word") != -1);
    assertTrue(extraction.indexOf("<?xml") == -1);
    nl = (NamedList) list.get("solr-word.pdf_metadata");
    assertTrue("nl is null and it shouldn't be", nl != null);
    title = nl.get("title");
    assertTrue("title is null and it shouldn't be", title != null);
}
@Test
public void testXPath() throws Exception {
    // Verifies that an XPATH_EXPRESSION restricts the extraction to the matching
    // XHTML nodes only (here: the descendant text of the <a> elements).
    ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);
    SolrQueryResponse rsp = loadLocal("example.html",
        ExtractingParams.XPATH_EXPRESSION, "/xhtml:html/xhtml:body/xhtml:a/descendant:node()",
        ExtractingParams.EXTRACT_ONLY, "true"
    );
    assertTrue("rsp is null and it shouldn't be", rsp != null);
    // In extract-only mode the extraction is returned in the response, keyed by
    // the resource name.
    NamedList list = rsp.getValues();
    String val = (String) list.get("example.html");
    val = val.trim();
    // There are two <a> tags in the document and their text gets collapsed into
    // the single string "linkNews".
    assertTrue(val + " is not equal to " + "linkNews", val.equals("linkNews"));
}
/** test arabic PDF extraction is functional */
@Test
public void testArabicPDF() throws Exception {
    ExtractingRequestHandler handler = (ExtractingRequestHandler)
        h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);

    // Index an Arabic PDF, mapping the extracted content to the case-insensitive
    // word-delimiter field "wdf_nocase"; the fmap.* arguments are source/target pairs.
    loadLocal("arabic.pdf", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
        "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
        "fmap.Author", "extractedAuthor",
        "fmap.content", "wdf_nocase",
        "literal.id", "one",
        "fmap.Last-Modified", "extractedDate");
    // The Arabic term is not searchable before the commit, but is afterwards.
    assertQ(req("wdf_nocase:السلم"), "//result[@numFound=0]");
    assertU(commit());
    assertQ(req("wdf_nocase:السلم"), "//result[@numFound=1]");
}
/**
 * Feeds the given local file to the /update/extract handler and returns the
 * response. The extra {@code args} are passed through as request parameters
 * (e.g. fmap.* field mappings, literal.* values).
 *
 * @param filename the local file to load as a content stream
 * @param args     request parameters as alternating key/value strings
 * @return the handler's response
 * @throws Exception if the request fails
 */
SolrQueryResponse loadLocal(String filename, String... args) throws Exception {
    LocalSolrQueryRequest req = (LocalSolrQueryRequest) req(args);

    try {
        // TODO: stop using locally defined streams once stream.file and
        // stream.body work everywhere
        List<ContentStream> cs = new ArrayList<ContentStream>();
        cs.add(new ContentStreamBase.FileStream(new File(filename)));
        req.setContentStreams(cs);
        return h.queryAndResponse("/update/extract", req);
    } finally {
        // Close the request so the resources it references are released; without
        // this the test leaks the request's resources on every invocation.
        req.close();
    }
}
}
| tests: fix resource leak
git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1022788 13f79535-47bb-0310-9956-ffa450edef68
| solr/contrib/extraction/src/test/java/org/apache/solr/handler/ExtractingRequestHandlerTest.java | tests: fix resource leak |
|
Java | apache-2.0 | 14e9a702a2c5454c8a4c0049e6a5c8fb063afdb0 | 0 | masterproject-reimbursement/reimbursement-server,masterproject-reimbursement/reimbursement-server | package ch.uzh.csg.reimbursement.service;
import static ch.uzh.csg.reimbursement.model.ExpenseState.ASSIGNED_TO_PROF;
import static ch.uzh.csg.reimbursement.model.ExpenseState.DRAFT;
import static ch.uzh.csg.reimbursement.model.ExpenseState.PRINTED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.REJECTED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_BE_ASSIGNED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_FINANCE_ADMIN;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_PROF;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_USER;
import static ch.uzh.csg.reimbursement.model.Role.FINANCE_ADMIN;
import static ch.uzh.csg.reimbursement.model.Role.PROF;
import static ch.uzh.csg.reimbursement.model.Role.UNI_ADMIN;
import static ch.uzh.csg.reimbursement.model.Role.USER;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import ch.uzh.csg.reimbursement.model.Expense;
import ch.uzh.csg.reimbursement.model.ExpenseItem;
import ch.uzh.csg.reimbursement.model.Token;
import ch.uzh.csg.reimbursement.model.User;
@Service
@Transactional
public class UserResourceAuthorizationService {
@Autowired
private UserService userService;
@Autowired
private ExpenseService expenseService;
public boolean checkEditAuthorization(Expense expense) {
return checkEditAuthorization(expense, userService.getLoggedInUser());
}
public boolean checkEditAuthorization(ExpenseItem expenseItem) {
return checkEditAuthorization(expenseItem.getExpense());
}
private boolean checkEditAuthorization(Expense expense, User user) {
if ((expense.getState().equals(DRAFT) || expense.getState().equals(REJECTED)) && expense.getUser().equals(user)) {
return true;
} else if (expense.getState().equals(ASSIGNED_TO_PROF) && expense.getAssignedManager() != null
&& expense.getAssignedManager().equals(user)) {
return true;
} else if ((expense.getState().equals(TO_BE_ASSIGNED) && user.getRoles().contains(FINANCE_ADMIN))
|| (expense.getFinanceAdmin() != null && expense.getFinanceAdmin().equals(user))) {
return true;
} else {
return false;
}
}
public boolean checkViewAuthorization(ExpenseItem expenseItem) {
return checkViewAuthorization(expenseItem.getExpense());
}
public boolean checkViewAuthorizationMobile(ExpenseItem expenseItem, Token token) {
return checkViewAuthorization(expenseItem.getExpense(), token.getUser());
}
public boolean checkViewAuthorizationMobile(Expense expense, Token token) {
return checkViewAuthorization(expense, token.getUser());
}
public boolean checkViewAuthorization(Expense expense) {
return checkViewAuthorization(expense, userService.getLoggedInUser());
}
private boolean checkViewAuthorization(Expense expense, User user) {
if (user.getRoles().contains(UNI_ADMIN)) {
return true;
} else if (expense.getUser().equals(user)) {
return true;
} else if (expense.getAssignedManager() != null && expense.getAssignedManager().equals(user)) {
return true;
} else if (user.getRoles().contains(FINANCE_ADMIN)) {
return true;
} else if (expense.getState().equals(PRINTED)) {
return true;
} else {
return false;
}
}
public boolean checkSignAuthorization(Expense expense) {
return checkSignAuthorization(expense, userService.getLoggedInUser());
}
private boolean checkSignAuthorization(Expense expense, User user) {
if (expense.getState().equals(TO_SIGN_BY_USER) && user.getRoles().contains(USER)) {
return true;
} else if (expense.getState().equals(TO_SIGN_BY_PROF) && user.getRoles().contains(PROF)) {
return true;
} else if (expense.getState().equals(TO_SIGN_BY_FINANCE_ADMIN) && user.getRoles().contains(FINANCE_ADMIN)) {
return true;
} else {
return false;
}
}
public boolean checkAssignAuthorization(Expense expense) {
if (expense.getExpenseItems().isEmpty()) {
return false;
} else {
return true;
}
}
} | src/main/java/ch/uzh/csg/reimbursement/service/UserResourceAuthorizationService.java | package ch.uzh.csg.reimbursement.service;
import static ch.uzh.csg.reimbursement.model.ExpenseState.ASSIGNED_TO_PROF;
import static ch.uzh.csg.reimbursement.model.ExpenseState.DRAFT;
import static ch.uzh.csg.reimbursement.model.ExpenseState.REJECTED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_BE_ASSIGNED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_FINANCE_ADMIN;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_PROF;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_USER;
import static ch.uzh.csg.reimbursement.model.Role.FINANCE_ADMIN;
import static ch.uzh.csg.reimbursement.model.Role.PROF;
import static ch.uzh.csg.reimbursement.model.Role.UNI_ADMIN;
import static ch.uzh.csg.reimbursement.model.Role.USER;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import ch.uzh.csg.reimbursement.model.Expense;
import ch.uzh.csg.reimbursement.model.ExpenseItem;
import ch.uzh.csg.reimbursement.model.Token;
import ch.uzh.csg.reimbursement.model.User;
@Service
@Transactional
public class UserResourceAuthorizationService {
@Autowired
private UserService userService;
@Autowired
private ExpenseService expenseService;
public boolean checkEditAuthorization(Expense expense) {
return checkEditAuthorization(expense, userService.getLoggedInUser());
}
public boolean checkEditAuthorization(ExpenseItem expenseItem) {
return checkEditAuthorization(expenseItem.getExpense());
}
private boolean checkEditAuthorization(Expense expense, User user) {
if ((expense.getState().equals(DRAFT) || expense.getState().equals(REJECTED)) && expense.getUser().equals(user)) {
return true;
} else if (expense.getState().equals(ASSIGNED_TO_PROF) && expense.getAssignedManager() != null
&& expense.getAssignedManager().equals(user)) {
return true;
} else if ((expense.getState().equals(TO_BE_ASSIGNED) && user.getRoles().contains(FINANCE_ADMIN))
|| (expense.getFinanceAdmin() != null && expense.getFinanceAdmin().equals(user))) {
return true;
} else {
return false;
}
}
public boolean checkViewAuthorization(ExpenseItem expenseItem) {
return checkViewAuthorization(expenseItem.getExpense());
}
public boolean checkViewAuthorizationMobile(ExpenseItem expenseItem, Token token) {
return checkViewAuthorization(expenseItem.getExpense(), token.getUser());
}
public boolean checkViewAuthorizationMobile(Expense expense, Token token) {
return checkViewAuthorization(expense, token.getUser());
}
public boolean checkViewAuthorization(Expense expense) {
return checkViewAuthorization(expense, userService.getLoggedInUser());
}
private boolean checkViewAuthorization(Expense expense, User user) {
if (user.getRoles().contains(UNI_ADMIN)) {
return true;
} else if (expense.getUser().equals(user)) {
return true;
} else if (expense.getAssignedManager() != null && expense.getAssignedManager().equals(user)) {
return true;
} else if (userService.getLoggedInUser().getRoles().contains(FINANCE_ADMIN)) {
return true;
} else {
return false;
}
}
public boolean checkSignAuthorization(Expense expense) {
return checkSignAuthorization(expense, userService.getLoggedInUser());
}
private boolean checkSignAuthorization(Expense expense, User user) {
if (expense.getState().equals(TO_SIGN_BY_USER) && user.getRoles().contains(USER)) {
return true;
} else if (expense.getState().equals(TO_SIGN_BY_PROF) && user.getRoles().contains(PROF)) {
return true;
} else if (expense.getState().equals(TO_SIGN_BY_FINANCE_ADMIN) && user.getRoles().contains(FINANCE_ADMIN)) {
return true;
} else {
return false;
}
}
public boolean checkAssignAuthorization(Expense expense) {
if (expense.getExpenseItems().isEmpty()) {
return false;
} else {
return true;
}
}
} | made printed expenses public | src/main/java/ch/uzh/csg/reimbursement/service/UserResourceAuthorizationService.java | made printed expenses public |
|
Java | apache-2.0 | 2d01d8237d2ccbb80d84cfed9d924d48b524719c | 0 | michael-rapp/ChromeLikeTabSwitcher | /*
* Copyright 2016 Michael Rapp
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.tabswitcher;
import android.animation.Animator;
import android.animation.Animator.AnimatorListener;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.animation.ValueAnimator.AnimatorUpdateListener;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.AttrRes;
import android.support.annotation.DrawableRes;
import android.support.annotation.MenuRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.support.annotation.StyleRes;
import android.support.v4.content.ContextCompat;
import android.support.v4.util.Pair;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.Toolbar;
import android.support.v7.widget.Toolbar.OnMenuItemClickListener;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewPropertyAnimator;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.Transformation;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Set;
import de.mrapp.android.tabswitcher.gesture.DragHelper;
import de.mrapp.android.tabswitcher.view.TabSwitcherButton;
import de.mrapp.android.util.DisplayUtil.Orientation;
import de.mrapp.android.util.ThemeUtil;
import de.mrapp.android.util.ViewUtil;
import static de.mrapp.android.util.Condition.ensureAtLeast;
import static de.mrapp.android.util.Condition.ensureNotNull;
import static de.mrapp.android.util.DisplayUtil.getOrientation;
/**
* A chrome-like tab switcher.
*
* @author Michael Rapp
* @since 1.0.0
*/
public class TabSwitcher extends FrameLayout implements OnGlobalLayoutListener, Tab.Callback {
/**
 * Defines the interface, a class, which should be notified about a tab switcher's events, must
 * implement.
 *
 * NOTE(review): the callbacks appear to be invoked from view code — presumably on the UI
 * thread; confirm before performing heavy work in implementations.
 */
public interface Listener {

    /**
     * The method, which is invoked, when the tab switcher has been shown.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     */
    void onSwitcherShown(@NonNull final TabSwitcher tabSwitcher);

    /**
     * The method, which is invoked, when the tab switcher has been hidden.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     */
    void onSwitcherHidden(@NonNull final TabSwitcher tabSwitcher);

    /**
     * The method, which is invoked, when the currently selected tab has been changed.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     * @param selectedTabIndex
     *         The index of the currently selected tab as an {@link Integer} value or -1, if the
     *         tab switcher does not contain any tabs
     * @param selectedTab
     *         The currently selected tab as an instance of the class {@link Tab} or null, if
     *         the tab switcher does not contain any tabs
     */
    void onSelectionChanged(@NonNull final TabSwitcher tabSwitcher, int selectedTabIndex,
                            @Nullable Tab selectedTab);

    /**
     * The method, which is invoked, when a tab has been added to the tab switcher.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     * @param index
     *         The index of the tab, which has been added, as an {@link Integer} value
     * @param tab
     *         The tab, which has been added, as an instance of the class {@link Tab}. The tab
     *         may not be null
     */
    void onTabAdded(@NonNull final TabSwitcher tabSwitcher, int index, @NonNull Tab tab);

    /**
     * The method, which is invoked, when a tab has been removed from the tab switcher.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     * @param index
     *         The index of the tab, which has been removed, as an {@link Integer} value
     * @param tab
     *         The tab, which has been removed, as an instance of the class {@link Tab}. The tab
     *         may not be null
     */
    void onTabRemoved(@NonNull final TabSwitcher tabSwitcher, int index, @NonNull Tab tab);

    /**
     * The method, which is invoked, when all tabs have been removed from the tab switcher.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     */
    void onAllTabsRemoved(@NonNull final TabSwitcher tabSwitcher);

}
/**
 * Contains all types of animations, which can be used when adding or removing tabs.
 */
public enum AnimationType {

    // Swipes the tab towards/from the left (direction inferred from the name —
    // confirm against the animation code).
    SWIPE_LEFT,

    // Swipes the tab towards/from the right.
    SWIPE_RIGHT

}
// TODO: Provide a built-in view holder
public static abstract class Decorator {

    /**
     * Returns the view type of the given tab. The default implementation uses a
     * single view type (0); override together with {@link #getViewTypeCount()} to
     * support multiple layouts.
     */
    public int getViewType(@NonNull final Tab tab) {
        return 0;
    }

    /**
     * Returns the total number of distinct view types used by this decorator.
     */
    public int getViewTypeCount() {
        return 1;
    }

    /**
     * Inflates and returns the child view for the given view type. The returned
     * view is cached per view type by the tab switcher.
     */
    @NonNull
    public abstract View onInflateView(@NonNull final LayoutInflater inflater,
                                       @NonNull final ViewGroup parent, final int viewType);

    /**
     * Binds the given tab's data to a previously inflated child view.
     */
    public abstract void onShowTab(@NonNull final Context context,
                                   @NonNull final TabSwitcher tabSwitcher,
                                   @NonNull final View view, @NonNull final Tab tab,
                                   final int viewType);

}
private class ChildViewRecycler {

    /**
     * Lazily created cache, mapping view types to the child views, which have
     * already been inflated for them.
     */
    private SparseArray<View> views;

    /**
     * Returns the child view for the given view type, inflating it via the
     * decorator and caching it on first use.
     */
    @NonNull
    public View inflateView(@NonNull final ViewGroup parent, final int viewType) {
        View cachedView;

        if (views == null) {
            views = new SparseArray<>(getDecorator().getViewTypeCount());
            cachedView = null;
        } else {
            cachedView = views.get(viewType);
        }

        if (cachedView != null) {
            return cachedView;
        }

        View inflatedView = getDecorator().onInflateView(inflater, parent, viewType);
        views.put(viewType, inflatedView);
        return inflatedView;
    }

    /**
     * Clears the cache of previously inflated child views.
     */
    public void clearCache() {
        if (views != null) {
            views.clear();
            views = null;
        }
    }

}
private static class PreviewDataBinder
        extends AbstractDataBinder<Bitmap, Tab, ImageView, TabView> {

    /**
     * The tab switcher, the data binder belongs to.
     */
    private final TabSwitcher tabSwitcher;

    /**
     * The view recycler, which is used to inflate child views.
     */
    private final ChildViewRecycler childViewRecycler;

    /**
     * The view, which is rendered as a preview image.
     */
    private View child;

    /**
     * Creates a new data binder, which allows to render preview images of tabs.
     *
     * @param tabSwitcher
     *         The tab switcher, the data binder belong to, as an instance of the class {@link
     *         TabSwitcher}. The tab switcher may not be null
     * @param childViewRecycler
     *         The view recycler, which should be used to inflate child views, as an instance of
     *         the class {@link ChildViewRecycler}. The view recycler may not be null
     */
    public PreviewDataBinder(@NonNull final TabSwitcher tabSwitcher,
                             @NonNull final ChildViewRecycler childViewRecycler) {
        super(tabSwitcher.getContext());
        this.tabSwitcher = tabSwitcher;
        this.childViewRecycler = childViewRecycler;
    }

    @Override
    protected void onPreExecute(@NonNull final ImageView view,
                                @NonNull final TabView... params) {
        // Detach (or inflate) the child view, which will be rendered into the
        // preview bitmap, and let the decorator bind the tab's data to it.
        TabView tabView = params[0];
        ViewHolder viewHolder = tabView.viewHolder;
        child = viewHolder.child;
        int viewType = tabSwitcher.getDecorator().getViewType(tabView.tab);

        if (child == null) {
            child = childViewRecycler.inflateView(viewHolder.childContainer, viewType);
            // TODO: Must the view also be added to the parent? This is relevant when calling the showSwitcher-method, while the TabSwitcher is not yet inflated
        } else {
            viewHolder.child = null;
        }

        tabSwitcher.getDecorator()
                .onShowTab(getContext(), tabSwitcher, child, tabView.tab, viewType);
    }

    @Nullable
    @Override
    protected Bitmap doInBackground(@NonNull final Tab key, @NonNull final TabView... params) {
        // Draw the child view into a bitmap, which serves as the tab's preview
        // image. The red debug circle, which was previously painted on top of
        // every preview, has been removed (it was explicitly marked as
        // debugging-only code).
        Bitmap bitmap = Bitmap.createBitmap(child.getWidth(), child.getHeight(),
                Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        child.draw(canvas);
        return bitmap;
    }

    @Override
    protected void onPostExecute(@NonNull final ImageView view, @Nullable final Bitmap data,
                                 @NonNull final TabView... params) {
        // Show the rendered preview, if available; hide the image view otherwise.
        view.setImageBitmap(data);
        view.setVisibility(data != null ? View.VISIBLE : View.GONE);
    }

}
private class RecyclerAdapter extends ViewRecycler.Adapter<TabView, Integer> {

    // Data binder, which renders preview images of tabs (asynchronously, when the
    // child view is not attached).
    private final PreviewDataBinder dataBinder;

    // TODO: Only add child view, if tab view is the selected one
    // Attaches the decorator's child view to the tab (inflating it on demand),
    // hides the preview image and border, and lets the decorator bind the tab's data.
    private void addChildView(@NonNull final TabView tabView) {
        ViewHolder viewHolder = tabView.viewHolder;
        View view = viewHolder.child;
        int viewType = getDecorator().getViewType(tabView.tab);

        if (view == null) {
            ViewGroup parent = viewHolder.childContainer;
            view = childViewRecycler.inflateView(parent, viewType);
            LayoutParams layoutParams =
                    new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
            // Propagate the switcher's padding to the child view.
            layoutParams.setMargins(getPaddingLeft(), getPaddingTop(), getPaddingRight(),
                    getPaddingBottom());
            parent.addView(view, 0, layoutParams);
            viewHolder.child = view;
        }

        viewHolder.previewImageView.setVisibility(View.GONE);
        viewHolder.previewImageView.setImageBitmap(null);
        viewHolder.borderView.setVisibility(View.GONE);
        getDecorator().onShowTab(getContext(), TabSwitcher.this, view, tabView.tab, viewType);
    }

    // Replaces the tab's child view with a rendered preview image. The preview is
    // loaded synchronously while the child view is still attached (and the child
    // view is removed afterwards); otherwise it is loaded asynchronously.
    private void renderChildView(@NonNull final TabView tabView) {
        ViewHolder viewHolder = tabView.viewHolder;
        viewHolder.borderView.setVisibility(View.VISIBLE);
        boolean async = viewHolder.child == null;
        dataBinder.load(tabView.tab, viewHolder.previewImageView, async, tabView);

        if (!async) {
            removeChildView(viewHolder);
        }
    }

    // Removes the child view from the container. The container is expected to hold
    // two permanent children (preview image and border); the decorator's child view
    // is always inserted at index 0.
    private void removeChildView(@NonNull final ViewHolder viewHolder) {
        if (viewHolder.childContainer.getChildCount() > 2) {
            viewHolder.childContainer.removeViewAt(0);
        }
    }

    public RecyclerAdapter() {
        this.dataBinder = new PreviewDataBinder(TabSwitcher.this, childViewRecycler);
    }

    // Clears all preview bitmaps, which have been cached by the data binder.
    public void clearCachedBitmaps() {
        dataBinder.clearCache();
    }

    @NonNull
    @Override
    public View onInflateView(@NonNull final LayoutInflater inflater,
                              @Nullable final ViewGroup parent, @NonNull final TabView tabView,
                              final int viewType, @NonNull final Integer... params) {
        // Inflates a tab's view (layout depends on the dragging orientation), sets up
        // its background, border and holder, and associates the holder via the view's tag.
        ViewHolder viewHolder = new ViewHolder();
        View view = inflater.inflate(
                isDraggingHorizontally() ? R.layout.tab_view_horizontally : R.layout.tab_view,
                tabContainer, false);
        Drawable backgroundDrawable =
                ContextCompat.getDrawable(getContext(), R.drawable.tab_background);
        ViewUtil.setBackground(view, backgroundDrawable);
        int padding = tabInset + tabBorderWidth;
        view.setPadding(padding, tabInset, padding, padding);
        viewHolder.titleContainer = (ViewGroup) view.findViewById(R.id.tab_title_container);
        viewHolder.titleTextView = (TextView) view.findViewById(R.id.tab_title_text_view);
        viewHolder.closeButton = (ImageButton) view.findViewById(R.id.close_tab_button);
        viewHolder.childContainer = (ViewGroup) view.findViewById(R.id.child_container);
        viewHolder.previewImageView = (ImageView) view.findViewById(R.id.preview_image_view);
        adaptChildAndPreviewMargins(viewHolder);
        viewHolder.borderView = view.findViewById(R.id.border_view);
        Drawable borderDrawable =
                ContextCompat.getDrawable(getContext(), R.drawable.tab_border);
        ViewUtil.setBackground(viewHolder.borderView, borderDrawable);
        LayoutParams layoutParams =
                new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        // Negative margins let the border and title overlap the tab's inset area.
        int borderMargin = -(tabInset + tabBorderWidth);
        layoutParams.leftMargin = borderMargin;
        layoutParams.topMargin = -(tabInset + tabTitleContainerHeight);
        layoutParams.rightMargin = borderMargin;
        // An optional first param overrides the bottom margin.
        layoutParams.bottomMargin = params.length > 0 ? params[0] : borderMargin;
        view.setLayoutParams(layoutParams);
        view.setTag(viewHolder);
        tabView.view = view;
        tabView.viewHolder = viewHolder;
        return view;
    }

    @Override
    public void onShowView(@NonNull final Context context, @NonNull final View view,
                           @NonNull final TabView tabView, @NonNull final Integer... params) {
        // (Re-)binds a recycled view to the tab: title, icon, close button and color;
        // shows either the real child view or a preview depending on the switcher state.
        if (!tabView.isInflated()) {
            tabView.viewHolder = (ViewHolder) view.getTag();
            tabView.view = view;
        }

        Tab tab = tabView.tab;
        ViewHolder viewHolder = (ViewHolder) view.getTag();
        adaptTitle(viewHolder, tab);
        adaptIcon(viewHolder, tab);
        adaptCloseButton(viewHolder, tab);
        adaptColor(view, viewHolder, tab);

        if (!isSwitcherShown()) {
            addChildView(tabView);
        } else {
            renderChildView(tabView);
        }
    }

    @Override
    public void onRemoveView(@NonNull final View view, @NonNull final TabView tabView) {
        // Detach the decorator's child view when the tab's view is recycled.
        ViewHolder viewHolder = (ViewHolder) view.getTag();
        removeChildView(viewHolder);
        viewHolder.child = null;
    }

}
private class TabView {

    // The index of the tab within the tab switcher.
    private int index;

    @NonNull
    private Tab tab;

    // The tag, which stores the tab's current position and state. Shared via the
    // outer "tags" map, so all TabView instances for the same tab see one Tag.
    @NonNull
    private Tag tag;

    // The inflated view, or null if the tab's view is currently not inflated.
    private View view;

    private ViewHolder viewHolder;

    /**
     * Creates a wrapper around the tab at the given index, looking up its
     * (possibly recycled) view and its tag, creating a fresh tag if none exists yet.
     *
     * @param index the tab's index; must be at least 0
     */
    public TabView(final int index) {
        ensureAtLeast(index, 0, "The index must be at least 0");
        this.index = index;
        this.tab = getTab(index);
        this.view = viewRecycler.getView(this);

        if (view != null) {
            this.viewHolder = (ViewHolder) view.getTag();
        } else {
            this.viewHolder = null;
        }

        this.tag = tags.get(tab);

        if (tag == null) {
            tag = new Tag();
            tags.put(tab, tag);
        }
    }

    /**
     * Returns true, if a view and view holder are currently associated with the tab.
     */
    public boolean isInflated() {
        return view != null && viewHolder != null;
    }

    /**
     * Returns true, if the tab is currently visible, i.e. it is not hidden behind
     * the top-/bottom-most tabs, or it is in the process of being closed.
     */
    public boolean isVisible() {
        return (tag.state != State.TOP_MOST_HIDDEN && tag.state != State.BOTTOM_MOST_HIDDEN) ||
                tag.closing;
    }

    @Override
    public final String toString() {
        return "TabView [index = " + index + "]";
    }

    @Override
    public int hashCode() {
        // Identity is defined by the wrapped tab, consistent with equals.
        return tab.hashCode();
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }

        TabView other = (TabView) obj;
        return tab.equals(other.tab);
    }

}
/**
 * Comparator, which orders tab views by their index in ascending order.
 */
private class TabViewComparator implements Comparator<TabView> {

    @Override
    public int compare(final TabView o1, final TabView o2) {
        // Integer.compare avoids the boxing caused by the previous cast to Integer.
        return Integer.compare(o1.index, o2.index);
    }

}
// Iterates over the tab views in ascending or descending index order, creating a
// TabView wrapper for each visited index and remembering the first and previously
// returned views.
private class Iterator implements java.util.Iterator<TabView> {

    // True, if iterating from the last index down to 0.
    private boolean reverse;

    // The index of the tab view, which will be returned by the next call to next().
    private int index;

    // The index at which iteration stops (exclusive); -1 means "iterate to the end".
    private int end;

    private TabView current;

    private TabView previous;

    private TabView first;

    public Iterator() {
        this(false);
    }

    public Iterator(final boolean reverse) {
        this(reverse, -1);
    }

    public Iterator(final boolean reverse, final int start) {
        this(reverse, start, -1);
    }

    public Iterator(final boolean reverse, final int start, final int end) {
        this.reverse = reverse;
        // Shift the end by one so hasNext() can test with a plain == comparison.
        this.end = end != -1 ? (reverse ? end - 1 : end + 1) : -1;
        this.previous = null;
        this.index = start != -1 ? start : (reverse ? getCount() - 1 : 0);
        // Pre-seed "current" with the element just before the start position, so the
        // first call to next() records it as "previous".
        int previousIndex = reverse ? this.index + 1 : this.index - 1;

        if (previousIndex >= 0 && previousIndex < getCount()) {
            this.current = new TabView(previousIndex);
        } else {
            this.current = null;
        }
    }

    // Returns the first tab view, which has been returned by this iterator, or null.
    public TabView first() {
        return first;
    }

    // Returns the tab view, which was returned before the current one, or null.
    public TabView previous() {
        return previous;
    }

    // Returns the tab view, which would be returned by the next call to next(),
    // without advancing the iterator; null if the iteration has ended.
    public TabView peek() {
        if (hasNext()) {
            return new TabView(index);
        }

        return null;
    }

    @Override
    public boolean hasNext() {
        if (index == end) {
            return false;
        } else {
            if (reverse) {
                return index >= 0;
            } else {
                return getCount() - index >= 1;
            }
        }
    }

    @Override
    public TabView next() {
        // NOTE(review): returns null instead of throwing NoSuchElementException when
        // exhausted — callers are expected to check hasNext() first.
        if (hasNext()) {
            previous = current;

            if (first == null) {
                first = current;
            }

            current = new TabView(index);
            index += reverse ? -1 : 1;
            return current;
        }

        return null;
    }

}
// Holds references to the subviews of a tab's inflated layout.
private static class ViewHolder {

    // Container, which holds the tab's title and close button.
    private ViewGroup titleContainer;

    // Text view, which displays the tab's title.
    private TextView titleTextView;

    // Button, which allows to close the tab.
    private ImageButton closeButton;

    // Container, which hosts the decorator's child view and the preview image.
    private ViewGroup childContainer;

    // The decorator's child view, or null while it is detached.
    private View child;

    // Image view, which shows a rendered preview of the child view while the
    // switcher is shown.
    private ImageView previewImageView;

    // View, which renders the tab's border while the preview is shown.
    private View borderView;

}
// Stores a tab's current layout state (positions, distance, visibility state and
// whether it is being closed).
private static class Tag implements Cloneable {

    private float projectedPosition;

    private float actualPosition;

    private float distance;

    private State state;

    private boolean closing;

    @Override
    public Tag clone() {
        try {
            // Object.clone already performs a field-by-field copy, so the previous
            // manual re-assignment of every field after a successful clone was
            // redundant. ClassCastException also cannot occur here, because
            // super.clone() returns an object of this class.
            return (Tag) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen, because this class implements Cloneable; fall back to
            // a manual copy nevertheless, preserving the original behavior.
            Tag clone = new Tag();
            clone.projectedPosition = projectedPosition;
            clone.actualPosition = actualPosition;
            clone.distance = distance;
            clone.state = state;
            clone.closing = closing;
            return clone;
        }
    }

}
// The possible visibility states of a tab within the stacked layout. Ordered from
// the top stack to the bottom stack.
private enum State {

    STACKED_TOP,

    TOP_MOST_HIDDEN,

    TOP_MOST,

    VISIBLE,

    BOTTOM_MOST_HIDDEN,

    STACKED_BOTTOM

}
// The direction of an ongoing drag/scroll gesture; the OVERSHOOT_* values are used
// once the drag exceeds the scrollable range.
private enum ScrollDirection {

    NONE,

    DRAGGING_UP,

    DRAGGING_DOWN,

    OVERSHOOT_UP,

    OVERSHOOT_DOWN;

}
// Distinguishes the axis, along which tabs are dragged, from the axis orthogonal
// to it (used e.g. for swiping tabs away).
private enum Axis {

    DRAGGING_AXIS,

    ORTHOGONAL_AXIS

}
// Animation, which continues a drag gesture as a fling by feeding interpolated
// distances into handleDrag.
private class FlingAnimation extends Animation {

    // The total distance, the fling should cover (presumably in pixels — the unit
    // follows handleDrag's contract).
    private final float flingDistance;

    public FlingAnimation(final float flingDistance) {
        this.flingDistance = flingDistance;
    }

    @Override
    protected void applyTransformation(final float interpolatedTime, final Transformation t) {
        // Only keep dragging while the fling has not been cancelled (dragAnimation
        // is reset to null elsewhere to stop it).
        if (dragAnimation != null) {
            handleDrag(flingDistance * interpolatedTime, 0);
        }
    }

}
// Number of tabs, which are visually stacked at the top/bottom edge.
private static final int STACKED_TAB_COUNT = 3;

// Factor, which dampens drags beyond the linear range.
private static final float NON_LINEAR_DRAG_FACTOR = 0.5f;

// Maximum tilt angles (in degrees) when overshooting at the bottom/top.
private static final float MAX_DOWN_OVERSHOOT_ANGLE = 3f;

private static final float MAX_UP_OVERSHOOT_ANGLE = 2f;

// The padding of the view: left, top, right, bottom.
private int[] padding;

private Toolbar toolbar;

private ViewGroup tabContainer;

private Set<Listener> listeners;

private LayoutInflater inflater;

private ChildViewRecycler childViewRecycler;

private ViewRecycler<TabView, Integer> viewRecycler;

private RecyclerAdapter recyclerAdapter;

private Decorator decorator;

// Actions, which have been queued while the view was not yet laid out.
private Queue<Runnable> pendingActions;

/**
 * A list, which contains the tab switcher's tabs.
 */
private List<Tab> tabs;

// TODO: Only inflated views should be associated with tags. This allows to abandon this map.
private Map<Tab, Tag> tags;

// Index of the currently selected tab, or -1 if there are no tabs.
private int selectedTabIndex;

private int tabBackgroundColor;

private int dragThreshold;

/**
 * An instance of the class {@link DragHelper}, which is used to recognize drag gestures.
 */
private DragHelper dragHelper;

private DragHelper overshootDragHelper;

private DragHelper closeDragHelper;

private VelocityTracker velocityTracker;

// True, while the tab switcher is shown.
private boolean switcherShown;

// Spacings and dimensions (in pixels) obtained from resources.
private int stackedTabSpacing;

private int minTabSpacing;

private int maxTabSpacing;

private int maxOvershootDistance;

// Velocity thresholds for fling gestures.
private float minFlingVelocity;

private float maxFlingVelocity;

private float minCloseFlingVelocity;

// Alpha and scale applied to a tab while it is swiped away.
private float closedTabAlpha;

private float closedTabScale;

private int tabInset;

private int tabBorderWidth;

private int tabTitleContainerHeight;

private int tabViewBottomMargin;

private ScrollDirection scrollDirection;

// The tab, which is currently being swiped away, or null.
private TabView draggedTabView;

private int lastAttachedIndex;

private float attachedPosition;

// NOTE(review): -Float.MIN_VALUE is the smallest *positive* float negated (≈ -1.4e-45),
// not the most negative float. If an effectively unbounded lower threshold was
// intended (mirroring Float.MAX_VALUE below), this should probably be
// -Float.MAX_VALUE — confirm against the drag handling code before changing.
private float topDragThreshold = -Float.MIN_VALUE;

private float bottomDragThreshold = Float.MAX_VALUE;

// Id of the pointer, which started the current touch gesture, or -1.
private int pointerId = -1;

private Animation dragAnimation;

@Deprecated
private ViewPropertyAnimator relocateAnimation;

private ViewPropertyAnimator toolbarAnimation;

// Number of currently running animations.
private int runningAnimations;
    /**
     * Initializes the view.
     *
     * @param attributeSet
     * The attribute set, which should be used to initialize the view, as an instance of the
     * type {@link AttributeSet} or null, if no attributes should be obtained
     * @param defaultStyle
     * The default style to apply to this view. If 0, no style will be applied (beyond what
     * is included in the theme). This may either be an attribute resource, whose value will
     * be retrieved from the current theme, or an explicit style resource
     * @param defaultStyleResource
     * A resource identifier of a style resource that supplies default values for the view,
     * used only if the default style is 0 or can not be found in the theme. Can be 0 to not
     * look for defaults
     */
    private void initialize(@Nullable final AttributeSet attributeSet,
                            @AttrRes final int defaultStyle,
                            @StyleRes final int defaultStyleResource) {
        getViewTreeObserver().addOnGlobalLayoutListener(this);
        runningAnimations = 0;
        inflater = LayoutInflater.from(getContext());
        padding = new int[]{0, 0, 0, 0};
        listeners = new LinkedHashSet<>();
        pendingActions = new LinkedList<>();
        tabs = new ArrayList<>();
        tags = new HashMap<>();
        selectedTabIndex = -1;
        switcherShown = false;
        Resources resources = getResources();
        dragThreshold = resources.getDimensionPixelSize(R.dimen.drag_threshold);
        dragHelper = new DragHelper(dragThreshold);
        // Threshold 0: overshoot drags are tracked from the first moved pixel.
        overshootDragHelper = new DragHelper(0);
        closeDragHelper =
                new DragHelper(resources.getDimensionPixelSize(R.dimen.close_drag_threshold));
        stackedTabSpacing = resources.getDimensionPixelSize(R.dimen.stacked_tab_spacing);
        minTabSpacing = resources.getDimensionPixelSize(R.dimen.min_tab_spacing);
        maxTabSpacing = resources.getDimensionPixelSize(R.dimen.max_tab_spacing);
        maxOvershootDistance = resources.getDimensionPixelSize(R.dimen.max_overshoot_distance);
        ViewConfiguration configuration = ViewConfiguration.get(getContext());
        minFlingVelocity = configuration.getScaledMinimumFlingVelocity();
        maxFlingVelocity = configuration.getScaledMaximumFlingVelocity();
        minCloseFlingVelocity = resources.getDimensionPixelSize(R.dimen.min_close_fling_velocity);
        // Scale/alpha dimens hold float values, hence the TypedValue indirection.
        TypedValue typedValue = new TypedValue();
        resources.getValue(R.dimen.closed_tab_scale, typedValue, true);
        closedTabScale = typedValue.getFloat();
        resources.getValue(R.dimen.closed_tab_alpha, typedValue, true);
        closedTabAlpha = typedValue.getFloat();
        tabInset = resources.getDimensionPixelSize(R.dimen.tab_inset);
        tabBorderWidth = resources.getDimensionPixelSize(R.dimen.tab_border_width);
        tabTitleContainerHeight =
                resources.getDimensionPixelSize(R.dimen.tab_title_container_height);
        tabViewBottomMargin = -1;
        scrollDirection = ScrollDirection.NONE;
        inflateLayout();
        childViewRecycler = new ChildViewRecycler();
        recyclerAdapter = new RecyclerAdapter();
        // Reverse order: tab views are z-ordered back-to-front in the container.
        viewRecycler = new ViewRecycler<>(tabContainer, recyclerAdapter, inflater,
                Collections.reverseOrder(new TabViewComparator()));
        obtainStyledAttributes(attributeSet, defaultStyle, defaultStyleResource);
    }
private void inflateLayout() {
toolbar = (Toolbar) inflater.inflate(R.layout.tab_switcher_toolbar, this, false);
toolbar.setVisibility(View.INVISIBLE);
addView(toolbar, LayoutParams.MATCH_PARENT,
ThemeUtil.getDimensionPixelSize(getContext(), R.attr.actionBarSize));
tabContainer = new FrameLayout(getContext());
addView(tabContainer, LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
}
private void notifyOnSwitcherShown() {
for (Listener listener : listeners) {
listener.onSwitcherShown(this);
}
}
private void notifyOnSwitcherHidden() {
for (Listener listener : listeners) {
listener.onSwitcherHidden(this);
}
}
private void notifyOnSelectionChanged(final int selectedTabIndex,
@Nullable final Tab selectedTab) {
for (Listener listener : listeners) {
listener.onSelectionChanged(this, selectedTabIndex, selectedTab);
}
}
private void notifyOnTabAdded(final int index, @NonNull final Tab tab) {
for (Listener listener : listeners) {
listener.onTabAdded(this, index, tab);
}
}
private void notifyOnTabRemoved(final int index, @NonNull final Tab tab) {
for (Listener listener : listeners) {
listener.onTabRemoved(this, index, tab);
}
}
private void notifyOnAllTabsRemoved() {
for (Listener listener : listeners) {
listener.onAllTabsRemoved(this);
}
}
    /**
     * Creates and returns a click listener for a tab's close button, which removes the
     * given tab from the switcher when clicked.
     *
     * @param tab
     *         The tab, which should be removed on click
     * @return The listener, which has been created
     */
    private OnClickListener createCloseButtonClickListener(@NonNull final Tab tab) {
        return new OnClickListener() {
            @Override
            public void onClick(final View v) {
                removeTab(tab);
            }
        };
    }
    /**
     * Animates a tab view along the orthogonal axis, either off-screen (closing it) or
     * back to its resting position (aborting the close gesture).
     *
     * @param tabView
     *         The tab view, which should be animated
     * @param close
     *         True to animate the tab off-screen, false to animate it back to position 0
     * @param flingVelocity
     *         The fling velocity of the gesture; if it exceeds the minimum close fling
     *         velocity, the duration is derived from it instead of the default duration
     * @param startDelay
     *         The delay before the animation starts, in milliseconds
     * @param listener
     *         The listener, which should be notified about the animation, or null
     */
    private void animateClose(@NonNull final TabView tabView, final boolean close,
                              final float flingVelocity, final long startDelay,
                              @Nullable final AnimatorListener listener) {
        View view = tabView.view;
        float scale = getScale(view, true);
        float closedTabPosition = calculateClosedTabPosition();
        float position = getPosition(Axis.ORTHOGONAL_AXIS, view);
        // When closing, keep moving in the direction the tab was dragged (sign of position).
        float targetPosition =
                close ? (position < 0 ? -1 * closedTabPosition : closedTabPosition) : 0;
        float distance = Math.abs(targetPosition - position);
        long animationDuration;
        if (flingVelocity >= minCloseFlingVelocity) {
            // Duration from velocity: distance (px) / velocity (px/s) -> seconds -> ms.
            animationDuration = Math.round((distance / flingVelocity) * 1000);
        } else {
            // Scale the default duration by the fraction of the total distance remaining.
            animationDuration = Math.round(
                    getResources().getInteger(android.R.integer.config_longAnimTime) *
                            (distance / closedTabPosition));
        }
        ViewPropertyAnimator animation = view.animate();
        animation.setInterpolator(new AccelerateDecelerateInterpolator());
        animation.setListener(createAnimationListenerWrapper(listener));
        animation.setDuration(animationDuration);
        animatePosition(Axis.ORTHOGONAL_AXIS, animation, view, targetPosition, true);
        // Shrink and fade while closing; restore full scale/alpha when aborting.
        animateScale(Axis.ORTHOGONAL_AXIS, animation, close ? closedTabScale * scale : scale);
        animateScale(Axis.DRAGGING_AXIS, animation, close ? closedTabScale * scale : scale);
        animation.alpha(close ? closedTabAlpha : 1);
        animation.setStartDelay(startDelay);
        animation.start();
    }
    /**
     * Creates and returns an animation listener, which reacts to a tab's close (or abort)
     * animation: when the animation starts, neighboring tabs are relocated to fill the
     * gap; when it ends, the tab is either removed from the switcher or its state is
     * restored.
     *
     * @param closedTabView
     *         The tab view, which is being closed
     * @param close
     *         True, if the tab should actually be removed when the animation ends, false
     *         if the close gesture was aborted
     * @return The listener, which has been created
     */
    private AnimatorListener createCloseAnimationListener(@NonNull final TabView closedTabView,
                                                          final boolean close) {
        return new AnimatorListenerAdapter() {

            // Shifts the stored actual positions of the stacked tab views by one entry,
            // starting at the closed tab's index.
            private void adjustActualPositionOfStackedTabViews(final boolean reverse) {
                Iterator iterator = new Iterator(reverse, closedTabView.index);
                TabView tabView;
                Float previousActualPosition = null;
                while ((tabView = iterator.next()) != null) {
                    float actualPosition = tabView.tag.actualPosition;
                    if (previousActualPosition != null) {
                        tabView.tag.actualPosition = previousActualPosition;
                        // NOTE(review): applyTag is called with closedTabView although it
                        // is tabView's tag that was just modified — possibly should be
                        // applyTag(tabView); confirm before relying on this.
                        applyTag(closedTabView);
                    }
                    previousActualPosition = actualPosition;
                }
            }

            // Relocates the surrounding stacked tabs when a tab belonging to one of the
            // stacks has been removed, animating each with a staggered delay.
            private void relocateWhenStackedTabViewWasRemoved(final boolean top) {
                long startDelay = getResources().getInteger(android.R.integer.config_shortAnimTime);
                int start = closedTabView.index + (top ? -1 : 1);
                Iterator iterator = new Iterator(top, closedTabView.index);
                TabView tabView;
                Float previousProjectedPosition = null;
                while ((tabView = iterator.next()) != null &&
                        (tabView.tag.state == State.TOP_MOST_HIDDEN ||
                                tabView.tag.state == State.STACKED_TOP ||
                                tabView.tag.state == State.BOTTOM_MOST_HIDDEN ||
                                tabView.tag.state == State.STACKED_BOTTOM)) {
                    float projectedPosition = tabView.tag.projectedPosition;
                    if (previousProjectedPosition != null) {
                        if (tabView.tag.state == State.TOP_MOST_HIDDEN ||
                                tabView.tag.state == State.BOTTOM_MOST_HIDDEN) {
                            // A hidden tab takes over the state of its predecessor and
                            // either becomes visible (top) or has its visibility adapted.
                            TabView previous = iterator.previous();
                            tabView.tag.state = previous.tag.state;
                            if (top) {
                                tabView.tag.projectedPosition = previousProjectedPosition;
                                long delay = (start + 1 - tabView.index) * startDelay;
                                animateRelocate(tabView, previousProjectedPosition, delay,
                                        createRelocateAnimationListener(tabView, null, true));
                            } else {
                                adaptVisibility(tabView);
                            }
                            break;
                        } else {
                            // Only reset pending-action execution on the last stacked tab.
                            TabView peek = iterator.peek();
                            State peekState = peek != null ? peek.tag.state : null;
                            boolean reset = !iterator.hasNext() ||
                                    (peekState != State.STACKED_TOP &&
                                            peekState != State.STACKED_BOTTOM);
                            tabView.tag.projectedPosition = previousProjectedPosition;
                            long delay =
                                    (top ? (start + 1 - tabView.index) : (tabView.index - start)) *
                                            startDelay;
                            animateRelocate(tabView, previousProjectedPosition, delay,
                                    createRelocateAnimationListener(tabView, null, reset));
                        }
                    }
                    previousProjectedPosition = projectedPosition;
                }
                adjustActualPositionOfStackedTabViews(!top);
            }

            // Relocates the preceding tabs when a fully visible tab has been removed:
            // each tab slides into its predecessor's projected position.
            private void relocateWhenVisibleTabViewWasRemoved() {
                int start = closedTabView.index - 1;
                if (start >= 0) {
                    long startDelay =
                            getResources().getInteger(android.R.integer.config_shortAnimTime);
                    Iterator iterator = new Iterator(true, start);
                    TabView tabView;
                    int firstStackedTabIndex = -1;
                    while ((tabView = iterator.next()) != null && firstStackedTabIndex == -1) {
                        if (tabView.tag.state == State.BOTTOM_MOST_HIDDEN ||
                                tabView.tag.state == State.STACKED_BOTTOM) {
                            firstStackedTabIndex = tabView.index;
                        }
                        TabView previous = iterator.previous();
                        boolean reset = !iterator.hasNext() || firstStackedTabIndex != -1;
                        AnimatorListener listener =
                                createRelocateAnimationListener(tabView, previous.tag, reset);
                        animateRelocate(tabView, previous.tag.projectedPosition,
                                (start + 1 - tabView.index) * startDelay, tabView.index == start ?
                                        createRelocateAnimationListenerWrapper(closedTabView,
                                                listener) : listener);
                    }
                    // Shift the actual positions of the remaining stacked tabs as well.
                    if (firstStackedTabIndex != -1) {
                        iterator = new Iterator(true, firstStackedTabIndex);
                        Float previousActualPosition = null;
                        while ((tabView = iterator.next()) != null) {
                            float actualPosition = tabView.tag.actualPosition;
                            if (previousActualPosition != null) {
                                tabView.tag.actualPosition = previousActualPosition;
                            }
                            previousActualPosition = actualPosition;
                        }
                    }
                }
            }

            // Starts the relocation animation of a single tab view along the dragging axis.
            private void animateRelocate(@NonNull final TabView tabView,
                                         final float relocatePosition, final long startDelay,
                                         @Nullable final AnimatorListener listener) {
                View view = tabView.view;
                relocateAnimation = view.animate();
                relocateAnimation.setListener(listener);
                relocateAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
                relocateAnimation.setDuration(
                        getResources().getInteger(android.R.integer.config_mediumAnimTime));
                animatePosition(Axis.DRAGGING_AXIS, relocateAnimation, view, relocatePosition,
                        true);
                relocateAnimation.setStartDelay(startDelay);
                relocateAnimation.start();
            }

            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                // Relocation starts immediately, in parallel with the close animation,
                // choosing the strategy based on the removed tab's state.
                if (close) {
                    if (closedTabView.tag.state == State.BOTTOM_MOST_HIDDEN) {
                        adjustActualPositionOfStackedTabViews(true);
                    } else if (closedTabView.tag.state == State.TOP_MOST_HIDDEN) {
                        adjustActualPositionOfStackedTabViews(false);
                    } else if (closedTabView.tag.state == State.STACKED_BOTTOM) {
                        relocateWhenStackedTabViewWasRemoved(false);
                    } else if (closedTabView.tag.state == State.STACKED_TOP) {
                        relocateWhenStackedTabViewWasRemoved(true);
                    } else {
                        relocateWhenVisibleTabViewWasRemoved();
                    }
                }
            }

            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                if (close) {
                    // Actually remove the tab from the model and notify listeners.
                    int index = closedTabView.index;
                    viewRecycler.remove(closedTabView);
                    Tab tab = tabs.remove(index);
                    tab.removeCallback(TabSwitcher.this);
                    tags.remove(tab);
                    notifyOnTabRemoved(index, tab);
                    if (isEmpty()) {
                        selectedTabIndex = -1;
                        notifyOnSelectionChanged(-1, null);
                        animateToolbarVisibility(isToolbarShown(), 0);
                    } else if (selectedTabIndex == closedTabView.index) {
                        // The selected tab was removed: select its predecessor (or 0).
                        if (selectedTabIndex > 0) {
                            selectedTabIndex--;
                        }
                        notifyOnSelectionChanged(selectedTabIndex, getTab(selectedTabIndex));
                    }
                } else {
                    // Close was aborted: restore the tab's state and resume drag handling.
                    View view = closedTabView.view;
                    adaptTopMostTabViewWhenClosingAborted(closedTabView, closedTabView.index + 1);
                    closedTabView.tag.closing = false;
                    setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
                    handleRelease(null);
                    animateToolbarVisibility(true, 0);
                }
                draggedTabView = null;
            }

        };
    }
private float getDefaultPivot(@NonNull final Axis axis, @NonNull final View view) {
if (axis == Axis.DRAGGING_AXIS) {
return isDraggingHorizontally() ? getSize(axis, view) / 2f : 0;
} else {
return isDraggingHorizontally() ? 0 : getSize(axis, view) / 2f;
}
}
private float getPivotWhenClosing(@NonNull final Axis axis, @NonNull final View view) {
if (axis == Axis.DRAGGING_AXIS) {
return maxTabSpacing;
} else {
return getDefaultPivot(axis, view);
}
}
private float getPivotOnOvershootDown(@NonNull final Axis axis, @NonNull final View view) {
if (axis == Axis.DRAGGING_AXIS) {
return maxTabSpacing;
} else {
return getSize(axis, view) / 2f;
}
}
    /**
     * Returns the pivot of a view on a specific axis while overshooting at the start of
     * the tab list: always the view's center on that axis.
     */
    private float getPivotOnOvershootUp(@NonNull final Axis axis, @NonNull final View view) {
        return getSize(axis, view) / 2f;
    }
    /**
     * Wraps a relocate-animation listener so that, additionally to delegating all
     * callbacks, the top-most tab view is adapted once the animation has ended (handles
     * the case that the close gesture was aborted).
     *
     * @param closedTabView
     *         The tab view, whose close animation triggered the relocation
     * @param listener
     *         The listener to delegate to, or null
     * @return The wrapping listener, which has been created
     */
    private AnimatorListener createRelocateAnimationListenerWrapper(
            @NonNull final TabView closedTabView, @Nullable final AnimatorListener listener) {
        return new AnimatorListenerAdapter() {
            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                if (listener != null) {
                    listener.onAnimationStart(animation);
                }
            }
            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                adaptTopMostTabViewWhenClosingAborted(closedTabView, closedTabView.index);
                if (listener != null) {
                    listener.onAnimationEnd(animation);
                }
            }
        };
    }
    /**
     * Creates and returns an animation listener for a tab's relocate animation. It makes
     * the view visible when the animation starts and, when it ends, optionally assigns a
     * new tag to the tab, applies the tag and — if {@code reset} is true — allows pending
     * actions to be executed again.
     *
     * @param tabView
     *         The tab view, which is relocated
     * @param tag
     *         The tag, which should be assigned to the tab when the animation ends, or
     *         null to keep the current tag
     * @param reset
     *         True, if the relocate animation reference should be cleared and pending
     *         actions executed when the animation ends
     * @return The listener, which has been created
     */
    private AnimatorListener createRelocateAnimationListener(@NonNull final TabView tabView,
                                                             @Nullable final Tag tag,
                                                             final boolean reset) {
        return new AnimatorListenerAdapter() {
            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                View view = tabView.view;
                view.setVisibility(View.VISIBLE);
            }
            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                if (tag != null) {
                    tags.put(tabView.tab, tag);
                    tabView.tag = tag;
                }
                applyTag(tabView);
                if (reset) {
                    relocateAnimation = null;
                    executePendingAction();
                }
            }
        };
    }
    /**
     * Obtains all attributes from a specific attribute set.
     *
     * @param attributeSet
     * The attribute set, the attributes should be obtained from, as an instance of the type
     * {@link AttributeSet} or null, if no attributes should be obtained
     * @param defaultStyle
     * The default style to apply to this view. If 0, no style will be applied (beyond what
     * is included in the theme). This may either be an attribute resource, whose value will
     * be retrieved from the current theme, or an explicit style resource
     * @param defaultStyleResource
     * A resource identifier of a style resource that supplies default values for the view,
     * used only if the default style is 0 or can not be found in the theme. Can be 0 to not
     * look for defaults
     */
    private void obtainStyledAttributes(@Nullable final AttributeSet attributeSet,
                                        @AttrRes final int defaultStyle,
                                        @StyleRes final int defaultStyleResource) {
        TypedArray typedArray = getContext()
                .obtainStyledAttributes(attributeSet, R.styleable.TabSwitcher, defaultStyle,
                        defaultStyleResource);
        try {
            obtainBackground(typedArray);
            obtainTabBackgroundColor(typedArray);
        } finally {
            // The typed array must always be recycled, even if obtaining a value throws.
            typedArray.recycle();
        }
    }
/**
* Obtains the view's background from a specific typed array.
*
* @param typedArray
* The typed array, the background should be obtained from, as an instance of the class
* {@link TypedArray}. The typed array may not be null
*/
private void obtainBackground(@NonNull final TypedArray typedArray) {
int resourceId = typedArray.getResourceId(R.styleable.TabSwitcher_android_background, 0);
if (resourceId != 0) {
ViewUtil.setBackground(this, ContextCompat.getDrawable(getContext(), resourceId));
} else {
int defaultValue =
ContextCompat.getColor(getContext(), R.color.tab_switcher_background_color);
int color =
typedArray.getColor(R.styleable.TabSwitcher_android_background, defaultValue);
setBackgroundColor(color);
}
}
/**
* Obtains the background color of tabs from a specific typed array.
*
* @param typedArray
* The typed array, the background color should be obtained from, as an instance of the
* class {@link TypedArray}. The typed array may not be null
*/
private void obtainTabBackgroundColor(@NonNull final TypedArray typedArray) {
int defaultValue = ContextCompat.getColor(getContext(), R.color.tab_background_color);
tabBackgroundColor =
typedArray.getColor(R.styleable.TabSwitcher_tabBackgroundColor, defaultValue);
}
private int getPadding(@NonNull final Axis axis, final int gravity) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return gravity == Gravity.START ? getPaddingTop() : getPaddingBottom();
} else {
return gravity == Gravity.START ? getPaddingLeft() : getPaddingRight();
}
}
private Axis getOrientationInvariantAxis(@NonNull final Axis axis) {
if (isDraggingHorizontally()) {
return axis == Axis.DRAGGING_AXIS ? Axis.ORTHOGONAL_AXIS : Axis.DRAGGING_AXIS;
}
return axis;
}
    /**
     * Returns, whether tabs are dragged horizontally, which is the case in landscape
     * orientation.
     */
    private boolean isDraggingHorizontally() {
        return getOrientation(getContext()) == Orientation.LANDSCAPE;
    }
private float getScale(@NonNull final View view, final boolean includePadding) {
LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
float width = view.getWidth();
float targetWidth = width + layoutParams.leftMargin + layoutParams.rightMargin -
(includePadding ? getPaddingLeft() + getPaddingRight() : 0) -
(isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing : 0);
return targetWidth / width;
}
    /**
     * Returns a view's scaled size on a specific axis, without taking padding into
     * account.
     */
    private float getSize(@NonNull final Axis axis, @NonNull final View view) {
        return getSize(axis, view, false);
    }
private float getSize(@NonNull final Axis axis, @NonNull final View view,
final boolean includePadding) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return view.getHeight() * getScale(view, includePadding);
} else {
return view.getWidth() * getScale(view, includePadding);
}
}
private float getPosition(@NonNull final Axis axis, @NonNull final MotionEvent event) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return event.getY();
} else {
return event.getX();
}
}
    /**
     * Returns a view's logical position on a specific axis, i.e. its raw coordinate with
     * the toolbar offset, padding, margins and stacked-tab offset factored out (the exact
     * inverse of {@code setPosition}).
     */
    private float getPosition(@NonNull final Axis axis, @NonNull final View view) {
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            // Subtract the toolbar height (minus the tab inset) while the switcher is
            // shown, as well as the start padding.
            return view.getY() -
                    (isToolbarShown() && isSwitcherShown() ? toolbar.getHeight() - tabInset : 0) -
                    getPadding(axis, Gravity.START);
        } else {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            // Center within asymmetric padding; compensate the stacked-tab offset when
            // dragging horizontally.
            return view.getX() - layoutParams.leftMargin - getPaddingLeft() / 2f +
                    getPaddingRight() / 2f +
                    (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing / 2f : 0);
        }
    }
    /**
     * Moves a view to a logical position on a specific axis, applying the same offsets
     * that {@code getPosition} removes (toolbar, padding, margins, stacked-tab offset).
     */
    private void setPosition(@NonNull final Axis axis, @NonNull final View view,
                             final float position) {
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            view.setY((isToolbarShown() && isSwitcherShown() ? toolbar.getHeight() - tabInset : 0) +
                    getPadding(axis, Gravity.START) + position);
        } else {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            view.setX(position + layoutParams.leftMargin + getPaddingLeft() / 2f -
                    getPaddingRight() / 2f -
                    (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing / 2f : 0));
        }
    }
    /**
     * Configures an animator to move a view to a logical position on a specific axis,
     * applying the same offsets as {@code setPosition}. The padding offset is optional
     * here because some animations operate on already-padded coordinates.
     */
    private void animatePosition(@NonNull final Axis axis,
                                 @NonNull final ViewPropertyAnimator animator,
                                 @NonNull final View view, final float position,
                                 final boolean includePadding) {
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            animator.y(
                    (isToolbarShown() && isSwitcherShown() ? toolbar.getHeight() - tabInset : 0) +
                            (includePadding ? getPadding(axis, Gravity.START) : 0) + position);
        } else {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            animator.x(position + layoutParams.leftMargin +
                    (includePadding ? getPaddingLeft() / 2f - getPaddingRight() / 2f : 0) -
                    (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing / 2f : 0));
        }
    }
private float getRotation(@NonNull final Axis axis, @NonNull final View view) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return view.getRotationY();
} else {
return view.getRotationX();
}
}
private void setRotation(@NonNull final Axis axis, @NonNull final View view,
final float angle) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
view.setRotationY(isDraggingHorizontally() ? -1 * angle : angle);
} else {
view.setRotationX(isDraggingHorizontally() ? -1 * angle : angle);
}
}
private void animateRotation(@NonNull final Axis axis,
@NonNull final ViewPropertyAnimator animator, final float angle) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
animator.rotationY(isDraggingHorizontally() ? -1 * angle : angle);
} else {
animator.rotationX(isDraggingHorizontally() ? -1 * angle : angle);
}
}
private void setScale(@NonNull final Axis axis, @NonNull final View view, final float scale) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
view.setScaleY(scale);
} else {
view.setScaleX(scale);
}
}
private void animateScale(@NonNull final Axis axis,
@NonNull final ViewPropertyAnimator animator, final float scale) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
animator.scaleY(scale);
} else {
animator.scaleX(scale);
}
}
    /**
     * Moves a view's pivot point on a specific axis while keeping the view visually in
     * place: the translation is adjusted to compensate for the pivot change, based on the
     * view's current scale.
     */
    private void setPivot(@NonNull final Axis axis, @NonNull final View view, final float pivot) {
        LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            // The pivot is given in logical coordinates; convert to view-local ones.
            float newPivot = pivot - layoutParams.topMargin - tabTitleContainerHeight;
            view.setTranslationY(view.getTranslationY() +
                    (view.getPivotY() - newPivot) * (1 - view.getScaleY()));
            view.setPivotY(newPivot);
        } else {
            float newPivot = pivot - layoutParams.leftMargin;
            view.setTranslationX(view.getTranslationX() +
                    (view.getPivotX() - newPivot) * (1 - view.getScaleX()));
            view.setPivotX(newPivot);
        }
    }
    /**
     * Creates a new tab switcher.
     */
    public TabSwitcher(@NonNull final Context context) {
        this(context, null);
    }

    /**
     * Creates a new tab switcher from the given attribute set.
     */
    public TabSwitcher(@NonNull final Context context, @Nullable final AttributeSet attributeSet) {
        super(context, attributeSet);
        initialize(attributeSet, 0, 0);
    }

    /**
     * Creates a new tab switcher from the given attribute set and default style.
     */
    public TabSwitcher(@NonNull final Context context, @Nullable final AttributeSet attributeSet,
                       @AttrRes final int defaultStyle) {
        super(context, attributeSet, defaultStyle);
        initialize(attributeSet, defaultStyle, 0);
    }

    /**
     * Creates a new tab switcher from the given attribute set, default style and default
     * style resource (requires API level 21).
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public TabSwitcher(@NonNull final Context context, @Nullable final AttributeSet attributeSet,
                       @AttrRes final int defaultStyle, @StyleRes final int defaultStyleResource) {
        super(context, attributeSet, defaultStyle, defaultStyleResource);
        initialize(attributeSet, defaultStyle, defaultStyleResource);
    }
    /**
     * Adds a new tab at the end of the tab switcher, using the default animation.
     */
    public final void addTab(@NonNull final Tab tab) {
        addTab(tab, getCount());
    }

    /**
     * Adds a new tab at a specific index, using the default (swipe right) animation.
     */
    public final void addTab(@NonNull final Tab tab, final int index) {
        addTab(tab, index, AnimationType.SWIPE_RIGHT);
    }
    // TODO: Add support for adding tab, while switcher is shown
    /**
     * Adds a new tab at a specific index, using a specific animation. The operation is
     * deferred until no animation is running.
     *
     * @param tab
     *         The tab, which should be added. May not be null
     * @param index
     *         The index, the tab should be added at
     * @param animationType
     *         The animation, which should be used to show the tab. May not be null
     */
    public final void addTab(@NonNull final Tab tab, final int index,
                             @NonNull final AnimationType animationType) {
        ensureNotNull(tab, "The tab may not be null");
        ensureNotNull(animationType, "The animation type may not be null");
        enqueuePendingAction(new Runnable() {
            @Override
            public void run() {
                tabs.add(index, tab);
                tab.addCallback(TabSwitcher.this);
                notifyOnTabAdded(index, tab);
                // The first tab added becomes the selection.
                if (getCount() == 1) {
                    selectedTabIndex = 0;
                    notifyOnSelectionChanged(0, tab);
                }
                if (!isSwitcherShown()) {
                    toolbar.setAlpha(0);
                    // Only the selected tab's view is needed while the switcher is hidden.
                    if (selectedTabIndex == index && ViewCompat.isLaidOut(TabSwitcher.this)) {
                        viewRecycler.inflate(new TabView(index));
                    }
                } else {
                    // Animate the new tab in once its view has been laid out.
                    TabView tabView = new TabView(index);
                    tabView.view.getViewTreeObserver().addOnGlobalLayoutListener(
                            createAddTabViewLayoutListener(tabView, animationType));
                }
            }
        });
    }
    /**
     * Creates and returns a layout listener, which — once the new tab's view has been
     * laid out — places the view in its "closed" state (off-screen, shrunken, faded) and
     * then runs the close animation in reverse to swipe it in.
     *
     * @param tabView
     *         The tab view, which has been added
     * @param animationType
     *         The animation type, determining from which side the tab swipes in
     * @return The listener, which has been created
     */
    private OnGlobalLayoutListener createAddTabViewLayoutListener(@NonNull final TabView tabView,
                                                                  @NonNull final AnimationType animationType) {
        return new OnGlobalLayoutListener() {
            @SuppressWarnings("deprecation")
            @Override
            public void onGlobalLayout() {
                View view = tabView.view;
                // Only run once per layout pass.
                ViewUtil.removeOnGlobalLayoutListener(view.getViewTreeObserver(), this);
                view.setVisibility(View.VISIBLE);
                view.setAlpha(closedTabAlpha);
                float closedPosition = calculateClosedTabPosition();
                float dragPosition = getPosition(Axis.DRAGGING_AXIS,
                        tabContainer.getChildAt(getChildIndex(tabView.index)));
                float scale = getScale(view, true);
                setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
                setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
                // Start off-screen on the side given by the animation type.
                setPosition(Axis.ORTHOGONAL_AXIS, view,
                        animationType == AnimationType.SWIPE_LEFT ? -1 * closedPosition :
                                closedPosition);
                setPosition(Axis.DRAGGING_AXIS, view, dragPosition);
                setScale(Axis.ORTHOGONAL_AXIS, view, scale);
                setScale(Axis.DRAGGING_AXIS, view, scale);
                setPivot(Axis.DRAGGING_AXIS, view, getPivotWhenClosing(Axis.DRAGGING_AXIS, view));
                setPivot(Axis.ORTHOGONAL_AXIS, view,
                        getPivotWhenClosing(Axis.ORTHOGONAL_AXIS, view));
                setScale(Axis.ORTHOGONAL_AXIS, view, closedTabScale * scale);
                setScale(Axis.DRAGGING_AXIS, view, closedTabScale * scale);
                // close == false: animate from the closed state back to position 0.
                animateClose(tabView, false, 0, 0, createAddAnimationListener(tabView));
            }
        };
    }
    /**
     * Creates and returns an animation listener, which applies the tab's tag once the
     * swipe-in animation of a newly added tab has ended.
     */
    private AnimatorListener createAddAnimationListener(@NonNull final TabView tabView) {
        return new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                applyTag(tabView);
            }
        };
    }
    /**
     * Removes a specific tab from the tab switcher. If the switcher is hidden, the tab is
     * removed immediately; otherwise it is animated out with the close animation. The
     * operation is deferred until no animation is running.
     *
     * @param tab
     *         The tab, which should be removed. May not be null
     */
    public final void removeTab(@NonNull final Tab tab) {
        ensureNotNull(tab, "The tab may not be null");
        enqueuePendingAction(new Runnable() {
            @Override
            public void run() {
                int index = indexOfOrThrowException(tab);
                TabView tabView = new TabView(index);
                if (!isSwitcherShown()) {
                    viewRecycler.remove(tabView);
                    // NOTE: this local shadows the captured parameter; both should refer
                    // to the same Tab since index was looked up from it.
                    Tab tab = tabs.remove(index);
                    tab.removeCallback(TabSwitcher.this);
                    tags.remove(tab);
                    notifyOnTabRemoved(index, tab);
                    if (isEmpty()) {
                        selectedTabIndex = -1;
                        notifyOnSelectionChanged(-1, null);
                        toolbar.setAlpha(isToolbarShown() ? 1 : 0);
                    } else if (selectedTabIndex == index) {
                        // Select the predecessor (or index 0) of the removed selection.
                        if (selectedTabIndex > 0) {
                            selectedTabIndex--;
                        }
                        viewRecycler.inflate(new TabView(selectedTabIndex));
                        notifyOnSelectionChanged(selectedTabIndex, getTab(selectedTabIndex));
                    }
                } else {
                    // Animated removal: mark the tab as closing and run the close
                    // animation; the listener finalizes the removal when it ends.
                    adaptTopMostTabViewWhenClosing(tabView, tabView.index + 1);
                    tabView.tag.closing = true;
                    View view = tabView.view;
                    setPivot(Axis.DRAGGING_AXIS, view,
                            getPivotWhenClosing(Axis.DRAGGING_AXIS, view));
                    setPivot(Axis.ORTHOGONAL_AXIS, view,
                            getPivotWhenClosing(Axis.ORTHOGONAL_AXIS, view));
                    animateClose(tabView, true, 0, 0, createCloseAnimationListener(tabView, true));
                }
            }
        });
    }
public final void clear() {
enqueuePendingAction(new Runnable() {
@Override
public void run() {
if (!isSwitcherShown()) {
for (int i = tabs.size() - 1; i >= 0; i--) {
Tab tab = tabs.remove(i);
tab.removeCallback(TabSwitcher.this);
}
selectedTabIndex = -1;
viewRecycler.removeAll();
notifyOnSelectionChanged(-1, null);
notifyOnAllTabsRemoved();
toolbar.setAlpha(isToolbarShown() ? 1 : 0);
} else {
Iterator iterator = new Iterator(true);
TabView tabView;
int startDelay = 0;
while ((tabView = iterator.next()) != null) {
TabView previous = iterator.previous();
if (tabView.tag.state == State.VISIBLE ||
previous != null && previous.tag.state == State.VISIBLE) {
startDelay += getResources()
.getInteger(android.R.integer.config_shortAnimTime);
}
if (tabView.isInflated()) {
animateClose(tabView, true, 0, startDelay,
!iterator.hasNext() ? createClearAnimationListener() : null);
}
}
}
}
});
}
    /**
     * Fades the toolbar in or out. A running toolbar animation is cancelled first, and no
     * new animation is started if the toolbar already has the target alpha.
     *
     * @param visible
     *         True to fade the toolbar in, false to fade it out
     * @param startDelay
     *         The delay before the animation starts, in milliseconds
     */
    private void animateToolbarVisibility(final boolean visible, final long startDelay) {
        if (toolbarAnimation != null) {
            toolbarAnimation.cancel();
        }
        float targetAlpha = visible ? 1 : 0;
        if (toolbar.getAlpha() != targetAlpha) {
            toolbarAnimation = toolbar.animate();
            toolbarAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
            toolbarAnimation.setDuration(
                    getResources().getInteger(android.R.integer.config_mediumAnimTime));
            toolbarAnimation.setStartDelay(startDelay);
            toolbarAnimation.alpha(targetAlpha);
            toolbarAnimation.start();
        }
    }
private AnimatorListener createClearAnimationListener() {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
for (int i = tabs.size() - 1; i >= 0; i--) {
Tab tab = tabs.remove(i);
tab.removeCallback(TabSwitcher.this);
}
selectedTabIndex = -1;
notifyOnAllTabsRemoved();
notifyOnSelectionChanged(-1, null);
animateToolbarVisibility(isToolbarShown(), 0);
}
};
}
    /**
     * Selects a specific tab. If the switcher is hidden, the previously selected tab's
     * view is swapped for the new one; otherwise the switcher is hidden, which reveals
     * the newly selected tab. The operation is deferred until no animation is running.
     *
     * @param tab
     *         The tab, which should be selected. May not be null
     */
    public final void selectTab(@NonNull final Tab tab) {
        ensureNotNull(tab, "The tab may not be null");
        enqueuePendingAction(new Runnable() {
            @Override
            public void run() {
                int index = indexOfOrThrowException(tab);
                if (!isSwitcherShown()) {
                    viewRecycler.remove(new TabView(selectedTabIndex));
                    viewRecycler.inflate(new TabView(index));
                    selectedTabIndex = index;
                    notifyOnSelectionChanged(index, tab);
                } else {
                    // hideSwitcher() presumably triggers the selection-changed
                    // notification once the hide animation completes — not visible here.
                    selectedTabIndex = index;
                    hideSwitcher();
                }
            }
        });
    }
    /**
     * Translates a tab index into the index of the corresponding child within the tab
     * container, whose children are ordered in reverse (back-to-front).
     */
    @Deprecated
    private int getChildIndex(final int index) {
        return getCount() - (index + 1);
    }
    /**
     * Enqueues an action and immediately attempts to execute pending actions (they only
     * run while no animation is in progress).
     */
    private void enqueuePendingAction(@NonNull final Runnable action) {
        pendingActions.add(action);
        executePendingAction();
    }
private void executePendingAction() {
if (!isAnimationRunning()) {
final Runnable action = pendingActions.poll();
if (action != null) {
new Runnable() {
@Override
public void run() {
action.run();
executePendingAction();
}
}.run();
}
}
}
@Nullable
public final Tab getSelectedTab() {
return selectedTabIndex != -1 ? getTab(selectedTabIndex) : null;
}
    /**
     * Returns the index of the currently selected tab, or -1 if no tab is selected.
     */
    public final int getSelectedTabIndex() {
        return selectedTabIndex;
    }
public final boolean isEmpty() {
return getCount() == 0;
}
    /**
     * Returns the number of tabs, which are currently contained by the tab switcher.
     */
    public final int getCount() {
        return tabs.size();
    }
    /**
     * Returns the tab at a specific index. Throws an {@link IndexOutOfBoundsException}
     * for invalid indices (propagated from the backing list).
     */
    public final Tab getTab(final int index) {
        return tabs.get(index);
    }
    /**
     * Returns the index of a specific tab, or -1 if the tab is not contained by the
     * switcher.
     *
     * @param tab
     *         The tab, whose index should be returned. May not be null
     */
    public final int indexOf(@NonNull final Tab tab) {
        ensureNotNull(tab, "The tab may not be null");
        return tabs.indexOf(tab);
    }
private int indexOfOrThrowException(@NonNull final Tab tab) {
int index = indexOf(tab);
if (index == -1) {
throw new NoSuchElementException("No such tab: " + tab);
}
return index;
}
    /**
     * Returns, whether the tab switcher is currently shown.
     */
    public final boolean isSwitcherShown() {
        return switcherShown;
    }
    /**
     * Calculates the bottom margin a tab view needs while the switcher is shown, so that
     * the scaled tab, the toolbar and the bottom stack all fit into the container.
     */
    private int calculateTabViewBottomMargin(@NonNull final View view) {
        Axis axis = isDraggingHorizontally() ? Axis.ORTHOGONAL_AXIS : Axis.DRAGGING_AXIS;
        float tabHeight = (view.getHeight() - 2 * tabInset) * getScale(view, true);
        float totalHeight = getSize(axis, tabContainer);
        int toolbarHeight = isToolbarShown() ? toolbar.getHeight() - tabInset : 0;
        int stackHeight = isDraggingHorizontally() ? 0 : STACKED_TAB_COUNT * stackedTabSpacing;
        return Math.round(tabHeight + tabInset + toolbarHeight + stackHeight -
                (totalHeight - getPaddingTop() - getPaddingBottom()));
    }
    /**
     * Creates and returns a layout listener, which starts the show-switcher animation of
     * a tab view once the view has been laid out (the listener removes itself first, so
     * it only runs once).
     */
    private OnGlobalLayoutListener createShowSwitcherLayoutListener(
            @NonNull final TabView tabView) {
        return new OnGlobalLayoutListener() {
            @Override
            public void onGlobalLayout() {
                ViewUtil.removeOnGlobalLayoutListener(tabView.view.getViewTreeObserver(), this);
                animateShowSwitcher(tabView);
            }
        };
    }
    /**
     * Creates and returns a layout listener, which adapts a freshly inflated tab view's
     * size and applies its tag once the view has been laid out (runs once).
     */
    private OnGlobalLayoutListener createInflateTabViewLayoutListener(
            @NonNull final TabView tabView) {
        return new OnGlobalLayoutListener() {
            @Override
            public void onGlobalLayout() {
                View view = tabView.view;
                ViewUtil.removeOnGlobalLayoutListener(view.getViewTreeObserver(), this);
                adaptTabViewSize(tabView);
                applyTag(tabView);
            }
        };
    }
    /**
     * Adapts the size of a tab view by applying the switcher scale on both axes. The
     * pivots must be set before the scale is applied, because {@code setPivot}
     * compensates translation based on the current scale.
     */
    private void adaptTabViewSize(@NonNull final TabView tabView) {
        View view = tabView.view;
        setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
        setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
        float scale = getScale(view, true);
        setScale(Axis.DRAGGING_AXIS, view, scale);
        setScale(Axis.ORTHOGONAL_AXIS, view, scale);
    }
    /**
     * Animates a tab view into its switcher position: the view is scaled down, moved to
     * its projected position on the dragging axis and its bottom margin is animated, so
     * all tabs fit into the container. Finally the toolbar is faded in, if appropriate.
     */
    private void animateShowSwitcher(@NonNull final TabView tabView) {
        View view = tabView.view;
        setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
        setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
        float scale = getScale(view, true);
        // Tabs before the selected one start below the container, tabs after it at the
        // top, so they converge onto their projected positions.
        if (tabView.index < selectedTabIndex) {
            setPosition(Axis.DRAGGING_AXIS, view, getSize(Axis.DRAGGING_AXIS, tabContainer));
        } else if (tabView.index > selectedTabIndex) {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            setPosition(Axis.DRAGGING_AXIS, view,
                    isDraggingHorizontally() ? 0 : layoutParams.topMargin);
        }
        // Lazily compute the shared bottom margin on the first animated tab.
        if (tabViewBottomMargin == -1) {
            tabViewBottomMargin = calculateTabViewBottomMargin(view);
        }
        long animationDuration = getResources().getInteger(android.R.integer.config_longAnimTime);
        animateMargin(view, calculateTabViewBottomMargin(view), animationDuration);
        ViewPropertyAnimator animation = view.animate();
        animation.setDuration(animationDuration);
        animation.setInterpolator(new AccelerateDecelerateInterpolator());
        animation.setListener(
                createAnimationListenerWrapper(createShowSwitcherAnimationListener(tabView)));
        animateScale(Axis.DRAGGING_AXIS, animation, scale);
        animateScale(Axis.ORTHOGONAL_AXIS, animation, scale);
        animatePosition(Axis.DRAGGING_AXIS, animation, view, tabView.tag.projectedPosition, true);
        animatePosition(Axis.ORTHOGONAL_AXIS, animation, view, 0, true);
        animation.setStartDelay(0);
        animation.start();
        animateToolbarVisibility(isToolbarShown(),
                getResources().getInteger(android.R.integer.config_shortAnimTime));
    }
/**
 * Animates a tab's view from its position in the switcher back to its full-screen
 * appearance.
 *
 * @param tabView
 *         The tab, whose view should be animated
 */
private void animateHideSwitcher(@NonNull final TabView tabView) {
    View view = tabView.view;
    long animationDuration = getResources().getInteger(android.R.integer.config_longAnimTime);
    animateMargin(view, -(tabInset + tabBorderWidth), animationDuration);
    ViewPropertyAnimator animation = view.animate();
    animation.setDuration(animationDuration);
    animation.setInterpolator(new AccelerateDecelerateInterpolator());
    animation.setListener(
            createAnimationListenerWrapper(createHideSwitcherAnimationListener(tabView)));
    animateScale(Axis.DRAGGING_AXIS, animation, 1);
    animateScale(Axis.ORTHOGONAL_AXIS, animation, 1);
    LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
    animatePosition(Axis.ORTHOGONAL_AXIS, animation, view,
            isDraggingHorizontally() ? layoutParams.topMargin : 0, false);

    if (tabView.index < selectedTabIndex) {
        // Tabs before the selected one are moved off-screen.
        animatePosition(Axis.DRAGGING_AXIS, animation, view, getSize(Axis.DRAGGING_AXIS, this),
                false);
    } else {
        // The selected tab and all tabs after it share the same target position. The
        // original code contained two byte-identical branches for these cases; they have
        // been merged without changing behavior.
        animatePosition(Axis.DRAGGING_AXIS, animation, view,
                isDraggingHorizontally() ? 0 : layoutParams.topMargin, false);
    }

    animation.setStartDelay(0);
    animation.start();
    animateToolbarVisibility(isToolbarShown() && isEmpty(), 0);
}
/**
 * Wraps an animation listener in order to keep track of the number of running animations and
 * to execute pending actions once all animations have finished.
 *
 * @param listener
 *         The listener, which should be wrapped, or null, if no listener should be notified
 * @return The wrapper, which has been created
 */
private AnimatorListener createAnimationListenerWrapper(
@Nullable final AnimatorListener listener) {
return new AnimatorListenerAdapter() {
// Decrements the animation counter and runs pending actions when it reaches zero.
private void endAnimation() {
if (--runningAnimations == 0) {
executePendingAction();
}
}
@Override
public void onAnimationStart(final Animator animation) {
super.onAnimationStart(animation);
runningAnimations++;
if (listener != null) {
listener.onAnimationStart(animation);
}
}
@Override
public void onAnimationEnd(final Animator animation) {
super.onAnimationEnd(animation);
if (listener != null) {
listener.onAnimationEnd(animation);
}
endAnimation();
}
@Override
public void onAnimationCancel(final Animator animation) {
super.onAnimationCancel(animation);
if (listener != null) {
listener.onAnimationCancel(animation);
}
endAnimation();
}
};
}
// TODO: Calling this method should also work when the view is not yet inflated
// TODO: Should this be executed as a pending action?
/**
 * Shows the tab switcher by animating all visible tabs to their switcher positions. Does
 * nothing if the switcher is already shown or an animation is currently running.
 */
@SuppressWarnings("WrongConstant")
public final void showSwitcher() {
if (!isSwitcherShown() && !isAnimationRunning()) {
switcherShown = true;
notifyOnSwitcherShown();
attachedPosition = calculateAttachedPosition();
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateAndClipTopThresholdPosition(tabView, iterator.previous());
// Only the selected and the visible tabs need to be inflated and animated.
if (tabView.index == selectedTabIndex || tabView.isVisible()) {
viewRecycler.inflate(tabView);
View view = tabView.view;
// Defer the animation until the view has been laid out.
if (!ViewCompat.isLaidOut(view)) {
view.getViewTreeObserver().addOnGlobalLayoutListener(
createShowSwitcherLayoutListener(tabView));
} else {
animateShowSwitcher(tabView);
}
}
}
}
}
/**
 * Animates the bottom margin of a view from its current value to a target value.
 *
 * @param view
 *         The view, whose bottom margin should be animated
 * @param targetMargin
 *         The target bottom margin in pixels
 * @param animationDuration
 *         The duration of the animation in milliseconds
 */
private void animateMargin(@NonNull final View view, final int targetMargin,
final long animationDuration) {
LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
int initialMargin = layoutParams.bottomMargin;
// Animate the delta; the update listener adds it to the initial margin.
ValueAnimator animation = ValueAnimator.ofInt(targetMargin - initialMargin);
animation.setDuration(animationDuration);
animation.addListener(createAnimationListenerWrapper(null));
animation.setInterpolator(new AccelerateDecelerateInterpolator());
animation.setStartDelay(0);
animation.addUpdateListener(createMarginAnimatorUpdateListener(view, initialMargin));
animation.start();
}
/**
 * Creates an update listener, which applies an animated margin delta to a view's bottom
 * margin.
 *
 * @param view
 *         The view, whose bottom margin should be updated
 * @param initialMargin
 *         The view's bottom margin at the start of the animation in pixels
 * @return The listener, which has been created
 */
private AnimatorUpdateListener createMarginAnimatorUpdateListener(@NonNull final View view,
        final int initialMargin) {
    return new AnimatorUpdateListener() {

        @Override
        public void onAnimationUpdate(final ValueAnimator animation) {
            int animatedDelta = (int) animation.getAnimatedValue();
            LayoutParams params = (LayoutParams) view.getLayoutParams();
            params.bottomMargin = initialMargin + animatedDelta;
            view.setLayoutParams(params);
        }

    };
}
/**
 * Creates an update listener, which moves the first tab while reverting an overshoot at the
 * top and hides all tabs, which are covered by it.
 *
 * @return The listener, which has been created
 */
private AnimatorUpdateListener createOvershootUpAnimatorUpdateListener() {
return new AnimatorUpdateListener() {
// The first tab's position when the animation started; captured lazily on the
// first update.
private Float startPosition;
@Override
public void onAnimationUpdate(final ValueAnimator animation) {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.index == 0) {
View view = tabView.view;
if (startPosition == null) {
startPosition = getPosition(Axis.DRAGGING_AXIS, view);
}
setPosition(Axis.DRAGGING_AXIS, view,
startPosition + (float) animation.getAnimatedValue());
} else if (tabView.isInflated()) {
// Hide tabs, which are currently covered by the first tab.
View firstView = iterator.first().view;
View view = tabView.view;
view.setVisibility(getPosition(Axis.DRAGGING_AXIS, firstView) <=
getPosition(Axis.DRAGGING_AXIS, view) ? View.INVISIBLE :
View.VISIBLE);
}
}
}
};
}
/**
 * Prints the actual position of each tab to the console. Only meant for debugging purposes.
 */
private void printActualPositions() {
    Iterator iterator = new Iterator(true);
    TabView currentTab;

    while ((currentTab = iterator.next()) != null) {
        String line = currentTab.index + ": " + currentTab.tag.actualPosition;
        System.out.println(line);
    }
}
// TODO: Calling this method should also work when the view is not yet inflated
// TODO: Should this be executed as a pending action?
/**
 * Hides the tab switcher by animating all inflated tabs back to their full-screen
 * appearance. Does nothing if the switcher is not shown or an animation is running.
 */
public final void hideSwitcher() {
if (isSwitcherShown() && !isAnimationRunning()) {
switcherShown = false;
notifyOnSwitcherHidden();
// Reset the lazily calculated bottom margin; it is recalculated on the next show.
tabViewBottomMargin = -1;
recyclerAdapter.clearCachedBitmaps();
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.isInflated()) {
animateHideSwitcher(tabView);
}
}
}
}
/**
 * Toggles the visibility of the tab switcher: hides it, if it is currently shown, and shows
 * it otherwise.
 */
public final void toggleSwitcherVisibility() {
    // Use the public accessor instead of reading the field directly, for consistency with
    // showSwitcher() and hideSwitcher().
    if (isSwitcherShown()) {
        hideSwitcher();
    } else {
        showSwitcher();
    }
}
/**
 * Creates an animation listener, which applies a tab's tag once its show-switcher animation
 * has ended.
 *
 * @param tabView
 *         The tab, whose tag should be applied
 * @return The listener, which has been created
 */
private AnimatorListener createShowSwitcherAnimationListener(@NonNull final TabView tabView) {
    return new AnimatorListenerAdapter() {

        @Override
        public void onAnimationEnd(final Animator animation) {
            super.onAnimationEnd(animation);
            applyTag(tabView);
        }

    };
}
/**
 * Creates an animation listener, which keeps only the selected tab's view inflated once the
 * hide-switcher animation has ended.
 *
 * @param tabView
 *         The tab, whose animation has ended
 * @return The listener, which has been created
 */
private AnimatorListener createHideSwitcherAnimationListener(@NonNull final TabView tabView) {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
if (tabView.index == selectedTabIndex) {
viewRecycler.inflate(tabView);
} else {
// Non-selected tab views are no longer needed when the switcher is hidden.
viewRecycler.remove(tabView);
viewRecycler.clearCache();
}
}
};
}
/**
 * Calculates the position on the dragging axis, where tabs become "attached" to the drag,
 * i.e. where the non-linear damping of the drag distance starts.
 *
 * @return The position, which has been calculated, in pixels
 */
private float calculateAttachedPosition() {
return ((maxTabSpacing - minTabSpacing) / (1 - NON_LINEAR_DRAG_FACTOR)) *
NON_LINEAR_DRAG_FACTOR + calculateFirstTabTopThresholdPosition();
}
/**
 * Creates an animation listener, which finalizes a fling by simulating a release and
 * executing pending actions once the fling animation has ended.
 *
 * @return The listener, which has been created
 */
private AnimationListener createDragAnimationListener() {
return new AnimationListener() {
@Override
public void onAnimationStart(final Animation animation) {
}
@Override
public void onAnimationEnd(final Animation animation) {
handleRelease(null);
dragAnimation = null;
executePendingAction();
}
@Override
public void onAnimationRepeat(final Animation animation) {
}
};
}
/**
 * Wraps an animation listener in order to restore a view's default pivots once an overshoot
 * has been reverted.
 *
 * @param view
 *         The view, whose pivots should be restored
 * @param listener
 *         The listener, which should be wrapped, or null, if no listener should be notified
 * @return The wrapper, which has been created
 */
private AnimatorListener createOvershootAnimationListenerWrapper(@NonNull final View view,
        @Nullable final AnimatorListener listener) {
    return new AnimatorListenerAdapter() {

        @Override
        public void onAnimationEnd(final Animator animation) {
            super.onAnimationEnd(animation);
            // Restore the default pivot on each axis. The original code restored the
            // orthogonal pivot from the dragging axis' default pivot, which was
            // inconsistent with every other call site and looked like a copy-paste slip.
            setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
            setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));

            if (listener != null) {
                listener.onAnimationEnd(animation);
            }
        }

    };
}
/**
 * Creates an animation listener, which finalizes a reverted overshoot at the bottom by
 * simulating a release and executing pending actions.
 *
 * @return The listener, which has been created
 */
private AnimatorListener createOvershootDownAnimationListener() {
    return new AnimatorListenerAdapter() {

        @Override
        public void onAnimationEnd(final Animator animation) {
            super.onAnimationEnd(animation);
            handleRelease(null);
            executePendingAction();
        }

    };
}
/**
 * Creates an animation listener, which finalizes a reverted overshoot at the top by
 * simulating a release.
 *
 * @return The listener, which has been created
 */
private AnimatorListener createOvershootUpAnimationListener() {
    return new AnimatorListenerAdapter() {

        @Override
        public void onAnimationEnd(final Animator animation) {
            super.onAnimationEnd(animation);
            handleRelease(null);
        }

    };
}
/**
 * Moves all tabs to their top threshold positions, i.e. the positions they occupy when the
 * drag has reached its upper limit.
 */
private void dragToTopThresholdPosition() {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateAndClipTopThresholdPosition(tabView, iterator.previous());
if (tabView.isInflated()) {
applyTag(tabView);
}
}
}
/**
 * Calculates a tab's top threshold position and clips it against the top-most and
 * bottom-most positions.
 *
 * @param tabView
 *         The tab, whose position should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 */
private void calculateAndClipTopThresholdPosition(@NonNull final TabView tabView,
        @Nullable final TabView previous) {
    clipDraggedTabPosition(calculateTopThresholdPosition(tabView, previous), tabView,
            previous);
}
/**
 * Calculates a tab's top threshold position, relative to its predecessor.
 *
 * @param tabView
 *         The tab, whose position should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 * @return The position, which has been calculated
 */
private float calculateTopThresholdPosition(@NonNull final TabView tabView,
        @Nullable final TabView previous) {
    if (previous == null) {
        return calculateFirstTabTopThresholdPosition();
    }

    // The second tab keeps the minimum spacing to the first; all others the maximum.
    float spacing = tabView.index == 1 ? minTabSpacing : maxTabSpacing;
    return previous.tag.actualPosition - spacing;
}
/**
 * Calculates the top threshold position of the first tab.
 *
 * @return The position, which has been calculated
 */
private float calculateFirstTabTopThresholdPosition() {
    int count = getCount();

    if (count > STACKED_TAB_COUNT) {
        return STACKED_TAB_COUNT * stackedTabSpacing;
    }

    return (count - 1) * stackedTabSpacing;
}
/**
 * Moves all tabs to their bottom threshold positions, i.e. the positions they occupy when
 * the drag has reached its lower limit.
 */
private void dragToBottomThresholdPosition() {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateAndClipBottomThresholdPosition(tabView, iterator.previous());
if (tabView.isInflated()) {
applyTag(tabView);
}
}
}
/**
 * Calculates a tab's bottom threshold position and clips it against the top-most and
 * bottom-most positions.
 *
 * @param tabView
 *         The tab, whose position should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 */
private void calculateAndClipBottomThresholdPosition(@NonNull final TabView tabView,
        @Nullable final TabView previous) {
    clipDraggedTabPosition(calculateBottomThresholdPosition(tabView), tabView, previous);
}
/**
 * Calculates a tab's bottom threshold position.
 *
 * @param tabView
 *         The tab, whose position should be calculated
 * @return The position, which has been calculated
 */
private float calculateBottomThresholdPosition(@NonNull final TabView tabView) {
    int successorCount = getCount() - (tabView.index + 1);
    return successorCount * maxTabSpacing;
}
/**
 * Resets the accumulated drag distance, which is stored in each tab's tag.
 */
private void updateTags() {
    Iterator iterator = new Iterator();
    TabView currentTab;

    while ((currentTab = iterator.next()) != null) {
        currentTab.tag.distance = 0;
    }
}
// TODO: Move to TabView inner class
/**
 * Applies a tab's tag to its view, i.e. moves the view to its projected position and resets
 * pivots, alpha and rotation.
 *
 * @param tabView
 *         The tab, whose tag should be applied
 */
private void applyTag(@NonNull final TabView tabView) {
Tag tag = tabView.tag;
float position = tag.projectedPosition;
View view = tabView.view;
view.setAlpha(1f);
setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
setPosition(Axis.DRAGGING_AXIS, view, position);
setPosition(Axis.ORTHOGONAL_AXIS, view, 0);
setRotation(Axis.ORTHOGONAL_AXIS, view, 0);
}
/**
 * Adapts the visibility of a tab's view depending on its state: views, which are hidden at
 * the top or bottom of the stack and not being closed, are made invisible.
 *
 * @param tabView
 *         The tab, whose view's visibility should be adapted
 * @deprecated Appears to be superseded by the recycler-based inflation logic.
 */
@Deprecated
@SuppressWarnings("WrongConstant")
private void adaptVisibility(@NonNull final TabView tabView) {
View view = tabView.view;
State state = tabView.tag.state;
view.setVisibility((state == State.TOP_MOST_HIDDEN || state == State.BOTTOM_MOST_HIDDEN) &&
!tabView.tag.closing ? View.INVISIBLE : View.VISIBLE);
}
/**
 * Applies the non-linear damping to a tab's position while dragging downwards, ensuring
 * that the spacing to its predecessor never exceeds the maximum tab spacing.
 *
 * @param dragDistance
 *         The drag distance, which should be applied
 * @param tabView
 *         The tab, whose position should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 * @param currentPosition
 *         The tab's current actual position
 */
private void calculateNonLinearPositionWhenDraggingDown(final float dragDistance,
@NonNull final TabView tabView,
@Nullable final TabView previous,
final float currentPosition) {
if (previous != null && previous.tag.state == State.VISIBLE &&
tabView.tag.state == State.VISIBLE) {
float newPosition = calculateNonLinearPosition(dragDistance, currentPosition, tabView);
// Clamp the spacing to the predecessor to the maximum tab spacing.
if (previous.tag.projectedPosition - newPosition >= maxTabSpacing) {
lastAttachedIndex = tabView.index;
newPosition = previous.tag.projectedPosition - maxTabSpacing;
}
clipDraggedTabPosition(newPosition, tabView, previous);
}
}
/**
 * Calculates a tab's new position after a drag, applying the drag delta since the last
 * update and the direction-specific non-linear damping.
 *
 * @param dragDistance
 *         The total drag distance
 * @param tabView
 *         The tab, whose position should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 */
private void calculateTabPosition(final float dragDistance, @NonNull final TabView tabView,
@Nullable final TabView previous) {
// The last tab does not move; only tabs with at least one successor are repositioned.
if (getCount() - tabView.index > 1) {
// Only the distance, which has not yet been applied to this tab, is used.
float distance = dragDistance - tabView.tag.distance;
tabView.tag.distance = dragDistance;
if (distance != 0) {
float currentPosition = tabView.tag.actualPosition;
float newPosition = currentPosition + distance;
clipDraggedTabPosition(newPosition, tabView, previous);
if (scrollDirection == ScrollDirection.DRAGGING_DOWN) {
calculateNonLinearPositionWhenDraggingDown(distance, tabView, previous,
currentPosition);
} else if (scrollDirection == ScrollDirection.DRAGGING_UP) {
calculateNonLinearPositionWhenDraggingUp(distance, tabView, previous,
currentPosition);
}
}
}
}
/**
 * Applies the non-linear damping to a tab's position while dragging upwards, ensuring that
 * the spacing to its predecessor never falls below the minimum tab spacing.
 *
 * @param dragDistance
 *         The drag distance, which should be applied
 * @param tabView
 *         The tab, whose position should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 * @param currentPosition
 *         The tab's current actual position
 */
private void calculateNonLinearPositionWhenDraggingUp(final float dragDistance,
@NonNull final TabView tabView,
@Nullable final TabView previous,
final float currentPosition) {
if (tabView.tag.state == State.VISIBLE) {
// A tab is "attached" once it has been dragged past the attached position.
boolean attached = tabView.tag.projectedPosition > attachedPosition;
if (previous == null || attached) {
lastAttachedIndex = tabView.index;
}
if (previous != null && !attached) {
float newPosition =
calculateNonLinearPosition(dragDistance, currentPosition, tabView);
// Clamp the spacing to the predecessor to the minimum tab spacing, unless the
// predecessor is already stacked or hidden at the bottom.
if (previous.tag.state != State.STACKED_BOTTOM &&
previous.tag.state != State.BOTTOM_MOST_HIDDEN &&
previous.tag.projectedPosition - newPosition <= minTabSpacing) {
newPosition = previous.tag.projectedPosition - minTabSpacing;
}
clipDraggedTabPosition(newPosition, tabView, previous);
}
}
}
/**
 * Calculates a tab's non-linear position by exponentially damping the drag distance
 * depending on the tab's distance to the last attached tab.
 *
 * @param dragDistance
 *         The drag distance, which should be applied
 * @param currentPosition
 *         The tab's current actual position
 * @param tabView
 *         The tab, whose position should be calculated
 * @return The position, which has been calculated
 */
private float calculateNonLinearPosition(final float dragDistance, final float currentPosition,
        @NonNull final TabView tabView) {
    int exponent = tabView.index - lastAttachedIndex;
    float dampedDistance =
            (float) (dragDistance * Math.pow(NON_LINEAR_DRAG_FACTOR, exponent));
    return currentPosition + dampedDistance;
}
/**
 * Clips a dragged position against the tab's top-most and bottom-most positions and stores
 * the result, together with the resulting state, in the tab's tag.
 *
 * @param dragPosition
 *         The position resulting from the drag
 * @param tabView
 *         The tab, whose tag should be updated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 */
private void clipDraggedTabPosition(final float dragPosition, @NonNull final TabView tabView,
        @Nullable final TabView previous) {
    Tag tag = tabView.tag;
    Pair<Float, State> topMostPair = calculateTopMostPositionAndState(tabView, previous);

    if (dragPosition <= topMostPair.first) {
        // Clip at the top-most position.
        tag.projectedPosition = topMostPair.first;
        tag.actualPosition = dragPosition;
        tag.state = topMostPair.second;
        return;
    }

    Pair<Float, State> bottomMostPair = calculateBottomMostPositionAndState(tabView);

    if (dragPosition >= bottomMostPair.first) {
        // Clip at the bottom-most position.
        tag.projectedPosition = bottomMostPair.first;
        tag.actualPosition = dragPosition;
        tag.state = bottomMostPair.second;
        return;
    }

    // The position lies between both thresholds; the tab is fully visible.
    tag.projectedPosition = dragPosition;
    tag.actualPosition = dragPosition;
    tag.state = State.VISIBLE;
}
/**
 * Calculates the top-most position a tab may be dragged to, together with the state the tab
 * has when located there.
 *
 * @param tabView
 *         The tab, whose position and state should be calculated
 * @param previous
 *         The predecessor of the given tab, or null, if the tab is the first one
 * @return A pair, which contains the position and state
 */
private Pair<Float, State> calculateTopMostPositionAndState(@NonNull final TabView tabView,
@Nullable final TabView previous) {
// Tabs near the end of the list take part in the visible top stack; earlier tabs are
// hidden behind it.
if ((getCount() - tabView.index) <= STACKED_TAB_COUNT) {
float position = stackedTabSpacing * (getCount() - (tabView.index + 1));
return Pair.create(position,
(previous == null || previous.tag.state == State.VISIBLE) ? State.TOP_MOST :
State.STACKED_TOP);
} else {
float position = stackedTabSpacing * STACKED_TAB_COUNT;
return Pair.create(position,
(previous == null || previous.tag.state == State.VISIBLE) ? State.TOP_MOST :
State.TOP_MOST_HIDDEN);
}
}
/**
 * Calculates the bottom-most position a tab may be dragged to, together with the state the
 * tab has when located there.
 *
 * @param tabView
 *         The tab, whose position and state should be calculated
 * @return A pair, which contains the position and state
 */
private Pair<Float, State> calculateBottomMostPositionAndState(@NonNull final TabView tabView) {
    float size = getSize(Axis.DRAGGING_AXIS, tabContainer);
    int toolbarHeight =
            isToolbarShown() && !isDraggingHorizontally() ? toolbar.getHeight() - tabInset : 0;
    int padding = getPadding(Axis.DRAGGING_AXIS, Gravity.START) +
            getPadding(Axis.DRAGGING_AXIS, Gravity.END);
    int offset = isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing : 0;
    boolean stacked = tabView.index < STACKED_TAB_COUNT;
    // The first few tabs are part of the visible bottom stack; later tabs are hidden
    // behind it.
    int stackOffset =
            stackedTabSpacing * (stacked ? tabView.index + 1 : STACKED_TAB_COUNT);
    float position = size - toolbarHeight - tabInset - stackOffset - padding + offset;
    return Pair.create(position, stacked ? State.STACKED_BOTTOM : State.BOTTOM_MOST_HIDDEN);
}
/**
 * Handles touch events while the switcher is shown, dispatching them to the drag, release
 * and click handlers.
 *
 * @param event
 *         The touch event
 * @return True, if the event has been consumed, false otherwise
 */
@Override
public final boolean onTouchEvent(final MotionEvent event) {
if (isSwitcherShown() && !isEmpty()) {
// A new touch interrupts a running fling animation.
if (dragAnimation != null) {
dragAnimation.cancel();
dragAnimation = null;
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
handleDown(event);
return true;
case MotionEvent.ACTION_MOVE:
if (!isAnimationRunning() && event.getPointerId(0) == pointerId) {
if (velocityTracker == null) {
velocityTracker = VelocityTracker.obtain();
}
velocityTracker.addMovement(event);
handleDrag(getPosition(Axis.DRAGGING_AXIS, event),
getPosition(Axis.ORTHOGONAL_AXIS, event));
} else {
// The tracked pointer changed; restart the gesture.
handleRelease(null);
handleDown(event);
}
return true;
case MotionEvent.ACTION_UP:
if (!isAnimationRunning() && event.getPointerId(0) == pointerId) {
handleRelease(event);
}
return true;
default:
break;
}
}
return super.onTouchEvent(event);
}
/**
 * Returns, whether any animation is currently running, or not.
 *
 * @return True, if an animation is running, false otherwise
 */
private boolean isAnimationRunning() {
    boolean viewAnimationRunning = runningAnimations != 0;
    boolean relocateAnimationRunning = relocateAnimation != null;
    return viewAnimationRunning || relocateAnimationRunning;
}
/**
 * Handles a down touch event by remembering the pointer and (re-)initializing the velocity
 * tracker.
 *
 * @param event
 *         The touch event
 */
private void handleDown(@NonNull final MotionEvent event) {
    pointerId = event.getPointerId(0);

    if (velocityTracker != null) {
        velocityTracker.clear();
    } else {
        velocityTracker = VelocityTracker.obtain();
    }

    velocityTracker.addMovement(event);
}
/**
 * Returns, whether the tabs have been dragged to their upper limit, or not.
 *
 * @return True, if the threshold has been reached, false otherwise
 */
private boolean isTopDragThresholdReached() {
    if (getCount() > 1) {
        TabView firstTabView = new TabView(0);
        return firstTabView.tag.state == State.TOP_MOST;
    }

    // With at most one tab there is nothing to drag.
    return true;
}
/**
 * Returns, whether the tabs have been dragged to their lower limit, or not.
 *
 * @return True, if the threshold has been reached, false otherwise
 */
private boolean isBottomDragThresholdReached() {
if (getCount() <= 1) {
return true;
} else {
// NOTE(review): this inspects the second-to-last tab (getCount() - 2). Presumably the
// last tab never moves, so its predecessor determines the threshold — confirm.
TabView tabView = new TabView(getCount() - 2);
return tabView.tag.projectedPosition >= maxTabSpacing;
}
}
/**
 * Tilts all inflated tab views when overshooting at the bottom, using per-tab camera
 * distances to create a depth effect.
 *
 * @param angle
 *         The angle, the views should be rotated by, in degrees
 */
private void tiltOnOvershootDown(final float angle) {
float maxCameraDistance = getMaxCameraDistance();
float minCameraDistance = maxCameraDistance / 2f;
int firstVisibleIndex = -1;
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.isInflated()) {
View view = tabView.view;
if (!iterator.hasNext()) {
// The last tab is farthest away from the camera.
view.setCameraDistance(maxCameraDistance);
} else if (firstVisibleIndex == -1) {
view.setCameraDistance(minCameraDistance);
if (tabView.tag.state == State.VISIBLE) {
firstVisibleIndex = tabView.index;
}
} else {
// Interpolate the camera distance between min and max depending on the
// tab's position in the visible range.
int diff = tabView.index - firstVisibleIndex;
float ratio = (float) diff / (float) (getCount() - firstVisibleIndex);
view.setCameraDistance(
minCameraDistance + (maxCameraDistance - minCameraDistance) * ratio);
}
setPivot(Axis.DRAGGING_AXIS, view,
getPivotOnOvershootDown(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getPivotOnOvershootDown(Axis.ORTHOGONAL_AXIS, view));
setRotation(Axis.ORTHOGONAL_AXIS, view, angle);
}
}
}
/**
 * Tilts the first tab's view when overshooting at the top and hides all other inflated tab
 * views.
 *
 * @param angle
 *         The angle, the first view should be rotated by, in degrees
 */
private void tiltOnOvershootUp(final float angle) {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
// NOTE(review): tabView.view is read before checking isInflated(); presumably the
// first tab is always inflated at this point — confirm.
View view = tabView.view;
if (tabView.index == 0) {
view.setCameraDistance(getMaxCameraDistance());
setPivot(Axis.DRAGGING_AXIS, view, getPivotOnOvershootUp(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getPivotOnOvershootUp(Axis.ORTHOGONAL_AXIS, view));
setRotation(Axis.ORTHOGONAL_AXIS, view, angle);
} else if (tabView.isInflated()) {
tabView.view.setVisibility(View.INVISIBLE);
}
}
}
/**
 * Returns the maximum camera distance, which is used when tilting tabs, in pixels.
 *
 * @return The maximum camera distance
 */
private float getMaxCameraDistance() {
    float density = getResources().getDisplayMetrics().density;
    // 1280 dp converted to pixels.
    return 1280 * density;
}
/**
 * Handles a drag gesture. Depending on the drag position, either an overshoot at the top or
 * bottom is rendered, or the tabs are repositioned and a close gesture is detected.
 *
 * @param dragPosition
 *         The position on the dragging axis
 * @param orthogonalPosition
 *         The position on the orthogonal axis
 * @return True, if the drag has been handled, false otherwise
 */
@SuppressWarnings("WrongConstant")
private boolean handleDrag(final float dragPosition, final float orthogonalPosition) {
if (dragPosition <= topDragThreshold) {
// --- Overshoot at the top ---
if (!dragHelper.isReset()) {
dragHelper.reset(0);
updateTags();
}
scrollDirection = ScrollDirection.OVERSHOOT_UP;
overshootDragHelper.update(dragPosition);
float overshootDistance = Math.abs(overshootDragHelper.getDragDistance());
if (overshootDistance <= maxOvershootDistance) {
// Within the first phase, the first tab is moved towards the top edge and
// covered tabs are hidden.
float ratio = Math.max(0, Math.min(1, overshootDistance / maxOvershootDistance));
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.index == 0) {
View view = tabView.view;
float currentPosition = tabView.tag.projectedPosition;
setPivot(Axis.DRAGGING_AXIS, view,
getDefaultPivot(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
setPosition(Axis.DRAGGING_AXIS, view,
currentPosition - (currentPosition * ratio));
} else if (tabView.isInflated()) {
View firstView = iterator.first().view;
View view = tabView.view;
view.setVisibility(getPosition(Axis.DRAGGING_AXIS, firstView) <=
getPosition(Axis.DRAGGING_AXIS, view) ? View.INVISIBLE :
View.VISIBLE);
}
}
} else {
// Beyond the maximum overshoot distance, the first tab is tilted.
float ratio = Math.max(0, Math.min(1,
(overshootDistance - maxOvershootDistance) / maxOvershootDistance));
tiltOnOvershootUp(ratio * MAX_UP_OVERSHOOT_ANGLE);
}
} else if (dragPosition >= bottomDragThreshold) {
// --- Overshoot at the bottom: tilt all tabs ---
if (!dragHelper.isReset()) {
dragHelper.reset(0);
updateTags();
}
scrollDirection = ScrollDirection.OVERSHOOT_DOWN;
overshootDragHelper.update(dragPosition);
float overshootDistance = overshootDragHelper.getDragDistance();
float ratio = Math.max(0, Math.min(1, overshootDistance / maxOvershootDistance));
tiltOnOvershootDown(ratio * -MAX_DOWN_OVERSHOOT_ANGLE);
} else {
// --- Regular drag between both thresholds ---
overshootDragHelper.reset();
float previousDistance = dragHelper.isReset() ? 0 : dragHelper.getDragDistance();
dragHelper.update(dragPosition);
closeDragHelper.update(orthogonalPosition);
// Detect the start of a close gesture on the orthogonal axis.
if (scrollDirection == ScrollDirection.NONE && draggedTabView == null &&
closeDragHelper.hasThresholdBeenReached()) {
TabView tabView = getFocusedTabView(dragHelper.getDragStartPosition());
if (tabView != null && tabView.tab.isCloseable()) {
draggedTabView = tabView;
}
}
// Determine the scroll direction once the drag threshold has been exceeded.
if (draggedTabView == null && dragHelper.hasThresholdBeenReached()) {
if (scrollDirection == ScrollDirection.OVERSHOOT_UP) {
scrollDirection = ScrollDirection.DRAGGING_DOWN;
} else if (scrollDirection == ScrollDirection.OVERSHOOT_DOWN) {
scrollDirection = ScrollDirection.DRAGGING_UP;
} else {
scrollDirection = previousDistance - dragHelper.getDragDistance() <= 0 ?
ScrollDirection.DRAGGING_DOWN : ScrollDirection.DRAGGING_UP;
}
}
if (draggedTabView != null) {
handleDragToClose();
} else if (scrollDirection != ScrollDirection.NONE) {
// Reposition all tabs and (de-)inflate their views as they become
// (in-)visible.
lastAttachedIndex = 0;
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateTabPosition(dragHelper.getDragDistance(), tabView,
iterator.previous());
if (tabView.isInflated() && !tabView.isVisible()) {
viewRecycler.remove(tabView);
} else if (tabView.isVisible()) {
if (!tabView.isInflated()) {
inflateTabView(tabView);
} else {
applyTag(tabView);
}
}
}
checkIfDragThresholdReached(dragPosition);
}
return true;
}
return false;
}
/**
 * Inflates a tab's view. If the view has been newly inflated, its size and tag are applied
 * once it has been laid out; otherwise they are applied immediately.
 *
 * @param tabView
 *         The tab, whose view should be inflated
 */
private void inflateTabView(@NonNull final TabView tabView) {
boolean inflated = viewRecycler.inflate(tabView, tabViewBottomMargin);
if (inflated) {
View view = tabView.view;
view.getViewTreeObserver()
.addOnGlobalLayoutListener(createInflateTabViewLayoutListener(tabView));
} else {
adaptTabViewSize(tabView);
applyTag(tabView);
}
}
/**
 * Checks, whether the tabs have been dragged to their upper or lower limit and, if so,
 * snaps them to the respective threshold positions and enters the overshoot mode.
 *
 * @param dragPosition
 *         The current position on the dragging axis
 * @return True, if a threshold has been reached, false otherwise
 */
private boolean checkIfDragThresholdReached(final float dragPosition) {
if (isBottomDragThresholdReached() && (scrollDirection == ScrollDirection.DRAGGING_DOWN ||
scrollDirection == ScrollDirection.OVERSHOOT_DOWN)) {
bottomDragThreshold = dragPosition;
scrollDirection = ScrollDirection.OVERSHOOT_DOWN;
dragToBottomThresholdPosition();
return true;
} else if (isTopDragThresholdReached() && (scrollDirection == ScrollDirection.DRAGGING_UP ||
scrollDirection == ScrollDirection.OVERSHOOT_UP)) {
topDragThreshold = dragPosition;
scrollDirection = ScrollDirection.OVERSHOOT_UP;
dragToTopThresholdPosition();
return true;
}
return false;
}
/**
 * Handles the ongoing close gesture by moving, scaling and fading the dragged tab's view
 * depending on the orthogonal drag distance.
 */
private void handleDragToClose() {
View view = draggedTabView.view;
if (!draggedTabView.tag.closing) {
// When the close gesture starts, reveal the tab hidden behind the dragged one.
adaptTopMostTabViewWhenClosing(draggedTabView, draggedTabView.index + 1);
}
draggedTabView.tag.closing = true;
float dragDistance = closeDragHelper.getDragDistance();
setPivot(Axis.DRAGGING_AXIS, view, getPivotWhenClosing(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view, getPivotWhenClosing(Axis.ORTHOGONAL_AXIS, view));
float scale = getScale(view, true);
setPosition(Axis.ORTHOGONAL_AXIS, view, dragDistance);
// Interpolate scale and alpha between the fully-open and fully-closed appearance.
float ratio = 1 - (Math.abs(dragDistance) / calculateClosedTabPosition());
float scaledClosedTabScale = closedTabScale * scale;
float targetScale = scaledClosedTabScale + ratio * (scale - scaledClosedTabScale);
setScale(Axis.DRAGGING_AXIS, view, targetScale);
setScale(Axis.ORTHOGONAL_AXIS, view, targetScale);
view.setAlpha(closedTabAlpha + ratio * (1 - closedTabAlpha));
}
/**
 * Promotes the tab behind a closing top-most tab from hidden to top-most and inflates its
 * view, so that it becomes visible while the tab in front of it is being closed.
 *
 * @param closedTabView
 *         The tab, which is being closed
 * @param index
 *         The index of the tab behind the closing one
 */
private void adaptTopMostTabViewWhenClosing(@NonNull final TabView closedTabView,
final int index) {
if (closedTabView.tag.state == State.TOP_MOST) {
TabView tabView = new TabView(index);
if (tabView.tag.state == State.TOP_MOST_HIDDEN) {
tabView.tag.state = State.TOP_MOST;
inflateTabView(tabView);
}
}
}
/**
 * Reverts the promotion done by {@code adaptTopMostTabViewWhenClosing}, when a close
 * gesture has been aborted: the tab behind the formerly closing tab is hidden again and its
 * view is removed.
 *
 * @param closedTabView
 *         The tab, whose closing has been aborted
 * @param index
 *         The index of the tab behind the closing one
 */
private void adaptTopMostTabViewWhenClosingAborted(@NonNull final TabView closedTabView,
final int index) {
if (closedTabView.tag.state == State.TOP_MOST) {
TabView tabView = new TabView(index);
if (tabView.tag.state == State.TOP_MOST) {
tabView.tag.state = State.TOP_MOST_HIDDEN;
viewRecycler.remove(tabView);
}
}
}
/**
 * Calculates the position on the orthogonal axis, at which a dragged tab is considered to
 * be fully closed.
 *
 * @return The position, which has been calculated, in pixels
 */
private float calculateClosedTabPosition() {
    float containerExtent = getSize(Axis.ORTHOGONAL_AXIS, tabContainer);
    return containerExtent;
}
/**
 * Returns the visible or top-most tab, whose view is located at a specific position on the
 * dragging axis.
 *
 * @param position
 *         The position on the dragging axis
 * @return The tab, which has been found, or null, if no tab is located at the given
 * position
 */
@Nullable
private TabView getFocusedTabView(final float position) {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.tag.state == State.VISIBLE || tabView.tag.state == State.TOP_MOST) {
View view = tabView.view;
float toolbarHeight = isToolbarShown() && !isDraggingHorizontally() ?
toolbar.getHeight() - tabInset : 0;
float viewPosition = getPosition(Axis.DRAGGING_AXIS, view) + toolbarHeight +
getPadding(Axis.DRAGGING_AXIS, Gravity.START);
// Tabs are iterated front-to-back; the first tab, whose top edge lies above the
// given position, is the focused one.
if (viewPosition <= position) {
return tabView;
}
}
}
return null;
}
/**
 * Handles the release of a drag gesture. Depending on the previous scroll direction, either
 * a close animation, a fling, the reversal of an overshoot, or a click is triggered.
 *
 * @param event
 *         The touch event, which caused the release, or null, if the release is simulated
 */
private void handleRelease(@Nullable final MotionEvent event) {
boolean thresholdReached = dragHelper.hasThresholdBeenReached();
ScrollDirection flingDirection = this.scrollDirection;
// Reset all gesture state before dispatching to the individual handlers.
this.dragHelper.reset(dragThreshold);
this.overshootDragHelper.reset();
this.closeDragHelper.reset();
this.topDragThreshold = -Float.MAX_VALUE;
this.bottomDragThreshold = Float.MAX_VALUE;
this.scrollDirection = ScrollDirection.NONE;
if (draggedTabView != null) {
// A close gesture was in progress; decide whether to complete or revert it.
float flingVelocity = 0;
if (event != null && velocityTracker != null) {
int pointerId = event.getPointerId(0);
velocityTracker.computeCurrentVelocity(1000, maxFlingVelocity);
flingVelocity = Math.abs(velocityTracker.getXVelocity(pointerId));
}
View view = draggedTabView.view;
// Close when flung fast enough or dragged beyond a quarter of the view's extent.
boolean close = flingVelocity >= minCloseFlingVelocity ||
Math.abs(getPosition(Axis.ORTHOGONAL_AXIS, view)) >
getSize(Axis.ORTHOGONAL_AXIS, view) / 4f;
animateClose(draggedTabView, close, flingVelocity, 0,
createCloseAnimationListener(draggedTabView, close));
} else if (flingDirection == ScrollDirection.DRAGGING_UP ||
flingDirection == ScrollDirection.DRAGGING_DOWN) {
updateTags();
if (event != null && velocityTracker != null && thresholdReached) {
animateFling(event, flingDirection);
}
} else if (flingDirection == ScrollDirection.OVERSHOOT_DOWN) {
updateTags();
animateOvershootDown();
} else if (flingDirection == ScrollDirection.OVERSHOOT_UP) {
animateOvershootUp();
} else if (event != null && !dragHelper.hasThresholdBeenReached() &&
!closeDragHelper.hasThresholdBeenReached()) {
// No drag happened at all; interpret the gesture as a click.
handleClick(event);
} else {
updateTags();
}
if (velocityTracker != null) {
velocityTracker.recycle();
velocityTracker = null;
}
}
/**
 * Handles a click by selecting the tab, which is located at the click position, if any.
 *
 * @param event
 *         The touch event, which caused the click
 */
private void handleClick(@NonNull final MotionEvent event) {
    float clickPosition = getPosition(Axis.DRAGGING_AXIS, event);
    TabView focusedTabView = getFocusedTabView(clickPosition);

    if (focusedTabView != null) {
        selectTab(focusedTabView.tab);
    }
}
/**
 * Animates the reversal of an overshoot at the bottom by tilting the tabs back.
 */
private void animateOvershootDown() {
    AnimatorListener listener = createOvershootDownAnimationListener();
    animateTilt(new AccelerateDecelerateInterpolator(), listener, MAX_DOWN_OVERSHOOT_ANGLE);
}
/**
 * Animates the reversal of an overshoot at the top. If the first tab is still tilted, the
 * tilt is reverted first and the position animation is enqueued as a pending action;
 * otherwise the position animation runs immediately.
 */
private void animateOvershootUp() {
boolean tilted = animateTilt(new AccelerateInterpolator(), null, MAX_UP_OVERSHOOT_ANGLE);
if (tilted) {
enqueuePendingAction(new Runnable() {
@Override
public void run() {
animateOvershootUp(new DecelerateInterpolator());
}
});
} else {
animateOvershootUp(new AccelerateDecelerateInterpolator());
}
}
/**
 * Animates the first tab back to its projected position after an overshoot at the top. The
 * duration scales with the distance still to travel.
 *
 * @param interpolator
 *         The interpolator, which should be used by the animation
 */
private void animateOvershootUp(@NonNull final Interpolator interpolator) {
TabView tabView = new TabView(0);
View view = tabView.view;
setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
float position = getPosition(Axis.DRAGGING_AXIS, view);
float targetPosition = tabView.tag.projectedPosition;
long animationDuration = getResources().getInteger(android.R.integer.config_shortAnimTime);
ValueAnimator animation = ValueAnimator.ofFloat(targetPosition - position);
animation.setDuration(Math.round(animationDuration * Math.abs(
(targetPosition - position) / (float) (STACKED_TAB_COUNT * stackedTabSpacing))));
animation.addListener(createAnimationListenerWrapper(createOvershootUpAnimationListener()));
animation.setInterpolator(interpolator);
animation.setStartDelay(0);
animation.addUpdateListener(createOvershootUpAnimatorUpdateListener());
animation.start();
}
/**
 * Animates all tilted tab views back to a rotation of zero.
 *
 * @param interpolator
 *         The interpolator, which should be used by the animations
 * @param listener
 *         The listener, which should be notified when the last animation ends, or null
 * @param maxAngle
 *         The maximum tilt angle, used to scale the animation duration
 * @return True, if at least one view had to be animated, false otherwise
 */
private boolean animateTilt(@NonNull final Interpolator interpolator,
@Nullable final AnimatorListener listener, final float maxAngle) {
long animationDuration = getResources().getInteger(android.R.integer.config_shortAnimTime);
Iterator iterator = new Iterator(true);
TabView tabView;
boolean result = false;
while ((tabView = iterator.next()) != null) {
if (tabView.isInflated()) {
View view = tabView.view;
if (getRotation(Axis.ORTHOGONAL_AXIS, view) != 0) {
result = true;
ViewPropertyAnimator animation = view.animate();
// The listener is meant to be attached only to the last animated view.
animation.setListener(createAnimationListenerWrapper(
createOvershootAnimationListenerWrapper(view,
iterator.hasNext() ? null :
listener))); // TODO: Iterator.hasNext() will not work
animation.setDuration(Math.round(animationDuration *
(Math.abs(getRotation(Axis.ORTHOGONAL_AXIS, view)) / maxAngle)));
animation.setInterpolator(interpolator);
animateRotation(Axis.ORTHOGONAL_AXIS, animation, 0);
animation.setStartDelay(0);
animation.start();
}
}
}
return result;
}
/**
 * Starts a fling animation, if the release velocity exceeds the minimum fling velocity.
 *
 * @param event
 *         The touch event, which caused the release
 * @param flingDirection
 *         The direction, the tabs were being dragged in
 */
private void animateFling(@NonNull final MotionEvent event,
@NonNull final ScrollDirection flingDirection) {
int pointerId = event.getPointerId(0);
velocityTracker.computeCurrentVelocity(1000, maxFlingVelocity);
float flingVelocity = Math.abs(velocityTracker.getYVelocity(pointerId));
if (flingVelocity > minFlingVelocity) {
// The fling distance is proportional to the release velocity.
float flingDistance = 0.25f * flingVelocity;
if (flingDirection == ScrollDirection.DRAGGING_UP) {
flingDistance = -1 * flingDistance;
}
dragAnimation = new FlingAnimation(flingDistance);
dragAnimation.setFillAfter(true);
dragAnimation.setAnimationListener(createDragAnimationListener());
dragAnimation.setDuration(Math.round(Math.abs(flingDistance) / flingVelocity * 1000));
dragAnimation.setInterpolator(new DecelerateInterpolator());
startAnimation(dragAnimation);
}
}
/**
 * Sets the decorator, which is used to inflate and adapt the views of the switcher's tabs.
 * Cached child views and bitmaps are invalidated.
 *
 * @param decorator
 *         The decorator, which should be set. May not be null
 */
public final void setDecorator(@NonNull final Decorator decorator) {
    ensureNotNull(decorator, "The decorator may not be null");
    this.decorator = decorator;
    childViewRecycler.clearCache();
    recyclerAdapter.clearCachedBitmaps();
}
/**
 * Returns the decorator, which is used to inflate and adapt the views of the switcher's
 * tabs.
 *
 * @return The decorator, which has been set
 * @throws IllegalStateException
 *         If no decorator has been set
 */
public final Decorator getDecorator() {
    ensureNotNull(decorator, "No decorator has been set", IllegalStateException.class);
    return this.decorator;
}
/**
 * Adds a listener, which should be notified about the switcher's events.
 *
 * @param listener
 *         The listener, which should be added. May not be null
 */
public final void addListener(@NonNull final Listener listener) {
    ensureNotNull(listener, "The listener may not be null");
    listeners.add(listener);
}
/**
 * Removes a previously added listener.
 *
 * @param listener
 *         The listener, which should be removed. May not be null
 */
public final void removeListener(@NonNull final Listener listener) {
    ensureNotNull(listener, "The listener may not be null");
    listeners.remove(listener);
}
/**
 * Returns the toolbar, which is shown, when the switcher is shown.
 *
 * @return The toolbar. May not be null
 */
@NonNull
public final Toolbar getToolbar() {
    return this.toolbar;
}
/**
 * Shows or hides the tab switcher's toolbar. A hidden toolbar keeps its layout space
 * ({@code INVISIBLE}, not {@code GONE}).
 *
 * @param show
 *         True, if the toolbar should be shown, false otherwise
 */
public final void showToolbar(final boolean show) {
    int visibility = show ? View.VISIBLE : View.INVISIBLE;
    toolbar.setVisibility(visibility);
}
/**
 * Returns, whether the tab switcher's toolbar is currently shown.
 *
 * @return True, if the toolbar's visibility is {@code VISIBLE}, false otherwise
 */
public final boolean isToolbarShown() {
    return toolbar.getVisibility() == View.VISIBLE;
}
/**
 * Sets the title of the tab switcher's toolbar.
 *
 * @param title
 *         The title, which should be set, as an instance of the type {@link CharSequence} or
 *         null, if no title should be shown
 */
public final void setToolbarTitle(@Nullable final CharSequence title) {
    toolbar.setTitle(title);
}
/**
 * Sets the title of the tab switcher's toolbar to the text of a specific string resource.
 *
 * @param resourceId
 *         The resource id of the title as an {@link Integer} value. The resource id must
 *         correspond to a valid string resource
 */
public final void setToolbarTitle(@StringRes final int resourceId) {
    setToolbarTitle(getContext().getText(resourceId));
}
/**
 * Inflates a menu into the tab switcher's toolbar and registers a listener, which is notified,
 * when a menu item is clicked.
 *
 * @param resourceId
 *         The resource id of the menu as an {@link Integer} value. The resource id must
 *         correspond to a valid menu resource
 * @param listener
 *         The listener, which should be notified, as an instance of the type {@link
 *         OnMenuItemClickListener} or null, if no listener should be notified
 */
public final void inflateToolbarMenu(@MenuRes final int resourceId,
                                     @Nullable final OnMenuItemClickListener listener) {
    toolbar.inflateMenu(resourceId);
    toolbar.setOnMenuItemClickListener(listener);
}
/**
 * Returns the menu of the tab switcher's toolbar.
 *
 * @return The menu as an instance of the type {@link Menu}
 */
public final Menu getToolbarMenu() {
    return toolbar.getMenu();
}
/**
 * Sets up all {@link TabSwitcherButton} action views, which are contained by a specific menu,
 * to observe the given tab switcher: each button's tab count is initialized, a click listener
 * is registered and the button is added as a listener of the tab switcher.
 *
 * @param tabSwitcher
 *         The tab switcher, the buttons should be bound to, as an instance of the class
 *         {@link TabSwitcher}. The tab switcher may not be null
 * @param menu
 *         The menu, whose items should be inspected, as an instance of the type {@link Menu}.
 *         The menu may not be null
 * @param listener
 *         The click listener, which should be registered at each button, as an instance of the
 *         type {@link OnClickListener} or null, if no listener should be registered
 */
public static void setupWithMenu(@NonNull final TabSwitcher tabSwitcher,
                                 @NonNull final Menu menu,
                                 @Nullable final OnClickListener listener) {
    ensureNotNull(tabSwitcher, "The tab switcher may not be null");
    ensureNotNull(menu, "The menu may not be null");
    for (int index = 0; index < menu.size(); index++) {
        MenuItem item = menu.getItem(index);
        View actionView = item.getActionView();
        // Only action views of the expected type are set up; all other items are skipped.
        if (!(actionView instanceof TabSwitcherButton)) {
            continue;
        }
        TabSwitcherButton button = (TabSwitcherButton) actionView;
        button.setOnClickListener(listener);
        button.setCount(tabSwitcher.getCount());
        tabSwitcher.addListener(button);
    }
}
/**
 * Sets the navigation icon of the tab switcher's toolbar and registers a listener, which is
 * notified, when the icon is clicked.
 *
 * @param icon
 *         The icon, which should be set, as an instance of the class {@link Drawable} or null,
 *         if no icon should be shown
 * @param listener
 *         The listener, which should be notified, as an instance of the type {@link
 *         OnClickListener} or null, if no listener should be notified
 */
public final void setToolbarNavigationIcon(@Nullable final Drawable icon,
                                           @Nullable final OnClickListener listener) {
    toolbar.setNavigationIcon(icon);
    toolbar.setNavigationOnClickListener(listener);
}
/**
 * Sets the navigation icon of the tab switcher's toolbar to a specific drawable resource and
 * registers a listener, which is notified, when the icon is clicked.
 *
 * @param resourceId
 *         The resource id of the icon as an {@link Integer} value. The resource id must
 *         correspond to a valid drawable resource
 * @param listener
 *         The listener, which should be notified, as an instance of the type {@link
 *         OnClickListener} or null, if no listener should be notified
 */
public final void setToolbarNavigationIcon(@DrawableRes final int resourceId,
                                           @Nullable final OnClickListener listener) {
    setToolbarNavigationIcon(ContextCompat.getDrawable(getContext(), resourceId), listener);
}
/**
 * Stores the given padding instead of delegating to the superclass, so it can be applied to
 * the toolbar and to the child and preview views of all currently inflated tabs, rather than
 * to the view group itself.
 *
 * @param left
 *         The left padding in pixels
 * @param top
 *         The top padding in pixels
 * @param right
 *         The right padding in pixels
 * @param bottom
 *         The bottom padding in pixels
 */
@Override
public final void setPadding(final int left, final int top, final int right, final int bottom) {
    padding = new int[]{left, top, right, bottom};
    // The toolbar only honors the horizontal and top padding as margins.
    LayoutParams toolbarLayoutParams = (LayoutParams) toolbar.getLayoutParams();
    toolbarLayoutParams.setMargins(left, top, right, 0);
    // Note: Iterator refers to the inner class, which iterates the tab switcher's tabs.
    Iterator iterator = new Iterator();
    TabView tabView;
    while ((tabView = iterator.next()) != null) {
        ViewHolder viewHolder = tabView.viewHolder;
        if (viewHolder != null) {
            adaptChildAndPreviewMargins(viewHolder);
        }
    }
}
/**
 * Applies the tab switcher's current padding as margins to a tab's child view (if inflated)
 * and to its preview image view.
 *
 * @param viewHolder
 *         The view holder of the tab, whose views should be adapted, as an instance of the
 *         class {@link ViewHolder}. The view holder may not be null
 */
private void adaptChildAndPreviewMargins(@NonNull final ViewHolder viewHolder) {
    int left = getPaddingLeft();
    int top = getPaddingTop();
    int right = getPaddingRight();
    int bottom = getPaddingBottom();
    View child = viewHolder.child;
    if (child != null) {
        LayoutParams childLayoutParams = (LayoutParams) child.getLayoutParams();
        childLayoutParams.setMargins(left, top, right, bottom);
    }
    LayoutParams previewLayoutParams =
            (LayoutParams) viewHolder.previewImageView.getLayoutParams();
    previewLayoutParams.setMargins(left, top, right, bottom);
}
/**
 * Invoked, when the title of a tab has been changed. Updates the title of the tab's view, if
 * the tab belongs to this switcher and its view is currently inflated.
 *
 * @param tab
 *         The tab, whose title has been changed, as an instance of the class {@link Tab}
 */
@Override
public final void onTitleChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);
    if (index == -1) {
        // The tab does not belong to this tab switcher.
        return;
    }
    TabView tabView = new TabView(index);
    if (tabView.isInflated()) {
        adaptTitle(tabView.viewHolder, tab);
    }
}
/**
 * Adapts the title, which is displayed by a tab's title text view.
 *
 * @param viewHolder
 *         The view holder of the tab's view as an instance of the class {@link ViewHolder}
 * @param tab
 *         The tab, whose title should be displayed, as an instance of the class {@link Tab}
 */
private void adaptTitle(@NonNull final ViewHolder viewHolder, @NonNull final Tab tab) {
    viewHolder.titleTextView.setText(tab.getTitle());
}
/**
 * Invoked, when the icon of a tab has been changed. Updates the icon of the tab's view, if the
 * tab belongs to this switcher and its view is currently inflated.
 *
 * @param tab
 *         The tab, whose icon has been changed, as an instance of the class {@link Tab}
 */
@Override
public final void onIconChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);
    if (index == -1) {
        // The tab does not belong to this tab switcher.
        return;
    }
    TabView tabView = new TabView(index);
    if (tabView.isInflated()) {
        adaptIcon(tabView.viewHolder, tab);
    }
}
/**
 * Adapts the icon of a tab by setting it as the left compound drawable of the tab's title text
 * view.
 *
 * @param viewHolder
 *         The view holder of the tab's view as an instance of the class {@link ViewHolder}
 * @param tab
 *         The tab, whose icon should be displayed, as an instance of the class {@link Tab}
 */
private void adaptIcon(@NonNull final ViewHolder viewHolder, @NonNull final Tab tab) {
    viewHolder.titleTextView
            .setCompoundDrawablesWithIntrinsicBounds(tab.getIcon(getContext()), null, null,
                    null);
}
/**
 * Invoked, when the closeable state of a tab has been changed. Updates the close button of the
 * tab's view, if the tab belongs to this switcher and its view is currently inflated.
 *
 * @param tab
 *         The tab, whose closeable state has been changed, as an instance of the class
 *         {@link Tab}
 */
@Override
public final void onCloseableChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);
    if (index == -1) {
        // The tab does not belong to this tab switcher.
        return;
    }
    TabView tabView = new TabView(index);
    if (tabView.isInflated()) {
        adaptCloseButton(tabView.viewHolder, tab);
    }
}
/**
 * Adapts the visibility and click listener of a tab's close button, depending on whether the
 * tab is closeable.
 *
 * @param viewHolder
 *         The view holder of the tab's view as an instance of the class {@link ViewHolder}
 * @param tab
 *         The tab, whose close button should be adapted, as an instance of the class
 *         {@link Tab}
 */
private void adaptCloseButton(@NonNull final ViewHolder viewHolder, @NonNull final Tab tab) {
    boolean closeable = tab.isCloseable();
    viewHolder.closeButton.setVisibility(closeable ? View.VISIBLE : View.GONE);
    OnClickListener clickListener = closeable ? createCloseButtonClickListener(tab) : null;
    viewHolder.closeButton.setOnClickListener(clickListener);
}
/**
 * Invoked, when the color of a tab has been changed. Updates the background and border color
 * of the tab's view, if the tab belongs to this switcher and its view is currently inflated.
 *
 * @param tab
 *         The tab, whose color has been changed, as an instance of the class {@link Tab}
 */
@Override
public final void onColorChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);
    if (index == -1) {
        // The tab does not belong to this tab switcher.
        return;
    }
    TabView tabView = new TabView(index);
    if (tabView.isInflated()) {
        adaptColor(tabView.view, tabView.viewHolder, tab);
    }
}
/**
 * Adapts the color of a tab's background and border. If the tab does not specify a color
 * (i.e. its color is -1), the default tab background color is used instead.
 *
 * @param view
 *         The tab's view as an instance of the class {@link View}
 * @param viewHolder
 *         The view holder of the tab's view as an instance of the class {@link ViewHolder}
 * @param tab
 *         The tab, whose color should be applied, as an instance of the class {@link Tab}
 */
private void adaptColor(@NonNull final View view, @NonNull final ViewHolder viewHolder,
                        @NonNull final Tab tab) {
    int color = tab.getColor();
    // -1 acts as the "no custom color" sentinel; fall back to the default background color.
    int filterColor = color != -1 ? color : tabBackgroundColor;
    view.getBackground().setColorFilter(filterColor, PorterDuff.Mode.MULTIPLY);
    viewHolder.borderView.getBackground().setColorFilter(filterColor, PorterDuff.Mode.MULTIPLY);
}
/**
 * Returns the left padding, which has been stored via {@link #setPadding(int, int, int, int)}.
 *
 * @return The left padding in pixels
 */
@Override
public final int getPaddingLeft() {
    return padding[0];
}
/**
 * Returns the top padding, which has been stored via {@link #setPadding(int, int, int, int)}.
 *
 * @return The top padding in pixels
 */
@Override
public final int getPaddingTop() {
    return padding[1];
}
/**
 * Returns the right padding, which has been stored via {@link #setPadding(int, int, int, int)}.
 *
 * @return The right padding in pixels
 */
@Override
public final int getPaddingRight() {
    return padding[2];
}
/**
 * Returns the bottom padding, which has been stored via
 * {@link #setPadding(int, int, int, int)}.
 *
 * @return The bottom padding in pixels
 */
@Override
public final int getPaddingBottom() {
    return padding[3];
}
/**
 * Returns the start padding, taking the layout direction into account on API level 17 and
 * higher: in right-to-left layouts the start corresponds to the right padding, otherwise to
 * the left padding.
 *
 * @return The start padding in pixels
 */
@Override
public final int getPaddingStart() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 &&
            getLayoutDirection() == LAYOUT_DIRECTION_RTL) {
        return getPaddingRight();
    }
    return getPaddingLeft();
}
/**
 * Returns the end padding, taking the layout direction into account on API level 17 and
 * higher: in right-to-left layouts the end corresponds to the left padding, otherwise to the
 * right padding.
 *
 * @return The end padding in pixels
 */
@Override
public final int getPaddingEnd() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 &&
            getLayoutDirection() == LAYOUT_DIRECTION_RTL) {
        return getPaddingLeft();
    }
    return getPaddingRight();
}
/**
 * Invoked, when the view's initial layout has been performed. Unregisters itself as a layout
 * listener (so this only runs once) and inflates the view of the currently selected tab, if
 * any tab is selected.
 */
@Override
public final void onGlobalLayout() {
    ViewUtil.removeOnGlobalLayoutListener(getViewTreeObserver(), this);
    if (selectedTabIndex != -1) {
        TabView tabView = new TabView(selectedTabIndex);
        viewRecycler.inflate(tabView);
    }
}
} | library/src/main/java/de/mrapp/android/tabswitcher/TabSwitcher.java | /*
* Copyright 2016 Michael Rapp
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.tabswitcher;
import android.animation.Animator;
import android.animation.Animator.AnimatorListener;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.animation.ValueAnimator.AnimatorUpdateListener;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.AttrRes;
import android.support.annotation.DrawableRes;
import android.support.annotation.MenuRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.support.annotation.StyleRes;
import android.support.v4.content.ContextCompat;
import android.support.v4.util.Pair;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.Toolbar;
import android.support.v7.widget.Toolbar.OnMenuItemClickListener;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewPropertyAnimator;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.Transformation;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Set;
import de.mrapp.android.tabswitcher.gesture.DragHelper;
import de.mrapp.android.tabswitcher.view.TabSwitcherButton;
import de.mrapp.android.util.DisplayUtil.Orientation;
import de.mrapp.android.util.ThemeUtil;
import de.mrapp.android.util.ViewUtil;
import static de.mrapp.android.util.Condition.ensureAtLeast;
import static de.mrapp.android.util.Condition.ensureNotNull;
import static de.mrapp.android.util.DisplayUtil.getOrientation;
/**
* A chrome-like tab switcher.
*
* @author Michael Rapp
* @since 1.0.0
*/
public class TabSwitcher extends FrameLayout implements OnGlobalLayoutListener, Tab.Callback {
/**
 * Defines the interface, a class, which should be notified about a tab switcher's events, must
 * implement. Listeners are notified in the order they have been registered.
 */
public interface Listener {

    /**
     * The method, which is invoked, when the tab switcher has been shown.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     */
    void onSwitcherShown(@NonNull final TabSwitcher tabSwitcher);

    /**
     * The method, which is invoked, when the tab switcher has been hidden.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     */
    void onSwitcherHidden(@NonNull final TabSwitcher tabSwitcher);

    /**
     * The method, which is invoked, when the currently selected tab has been changed.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     * @param selectedTabIndex
     *         The index of the currently selected tab as an {@link Integer} value or -1, if the
     *         tab switcher does not contain any tabs
     * @param selectedTab
     *         The currently selected tab as an instance of the class {@link Tab} or null, if
     *         the tab switcher does not contain any tabs
     */
    void onSelectionChanged(@NonNull final TabSwitcher tabSwitcher, int selectedTabIndex,
                            @Nullable Tab selectedTab);

    /**
     * The method, which is invoked, when a tab has been added to the tab switcher.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     * @param index
     *         The index of the tab, which has been added, as an {@link Integer} value
     * @param tab
     *         The tab, which has been added, as an instance of the class {@link Tab}. The tab
     *         may not be null
     */
    void onTabAdded(@NonNull final TabSwitcher tabSwitcher, int index, @NonNull Tab tab);

    /**
     * The method, which is invoked, when a tab has been removed from the tab switcher.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     * @param index
     *         The index of the tab, which has been removed, as an {@link Integer} value
     * @param tab
     *         The tab, which has been removed, as an instance of the class {@link Tab}. The tab
     *         may not be null
     */
    void onTabRemoved(@NonNull final TabSwitcher tabSwitcher, int index, @NonNull Tab tab);

    /**
     * The method, which is invoked, when all tabs have been removed from the tab switcher.
     *
     * @param tabSwitcher
     *         The observed tab switcher as an instance of the class {@link TabSwitcher}. The
     *         tab switcher may not be null
     */
    void onAllTabsRemoved(@NonNull final TabSwitcher tabSwitcher);

}
/**
 * Contains all possible animation types, which can be used when a tab is removed.
 */
public enum AnimationType {

    // The tab is swiped out to the left.
    SWIPE_LEFT,

    // The tab is swiped out to the right.
    SWIPE_RIGHT

}
// TODO: Provide a built-in view holder
/**
 * An abstract decorator, which is responsible for inflating and adapting the child views,
 * which visualize the content of the tab switcher's tabs.
 */
public static abstract class Decorator {

    /**
     * Returns the view type, which corresponds to a specific tab. Defaults to 0, i.e. all
     * tabs share a single view type.
     *
     * @param tab
     *         The tab, whose view type should be returned, as an instance of the class
     *         {@link Tab}
     * @return The view type as an {@link Integer} value
     */
    public int getViewType(@NonNull final Tab tab) {
        return 0;
    }

    /**
     * Returns the number of distinct view types, which are used by the decorator. Defaults
     * to 1.
     *
     * @return The number of view types as an {@link Integer} value
     */
    public int getViewTypeCount() {
        return 1;
    }

    /**
     * Inflates the view, which is used to visualize tabs of a specific view type.
     */
    @NonNull
    public abstract View onInflateView(@NonNull final LayoutInflater inflater,
                                       @NonNull final ViewGroup parent, final int viewType);

    /**
     * Adapts a previously inflated view in order to visualize a specific tab.
     */
    public abstract void onShowTab(@NonNull final Context context,
                                   @NonNull final TabSwitcher tabSwitcher,
                                   @NonNull final View view, @NonNull final Tab tab,
                                   final int viewType);

}
/**
 * A recycler, which caches one inflated child view per view type, so the same view instance
 * can be reused to visualize different tabs of the same view type.
 */
private class ChildViewRecycler {

    // Maps view types to the single cached view of that type. Lazily created on first use.
    private SparseArray<View> views;

    /**
     * Returns the cached child view for the given view type, inflating it via the decorator,
     * if it has not been inflated yet.
     *
     * @param parent
     *         The parent, which is passed to the decorator when inflating, as an instance of
     *         the class {@link ViewGroup}
     * @param viewType
     *         The view type of the view, which should be returned, as an {@link Integer} value
     * @return The child view as an instance of the class {@link View}
     */
    @NonNull
    public View inflateView(@NonNull final ViewGroup parent, final int viewType) {
        View child = null;
        if (views == null) {
            views = new SparseArray<>(getDecorator().getViewTypeCount());
        } else {
            child = views.get(viewType);
        }
        if (child == null) {
            child = getDecorator().onInflateView(inflater, parent, viewType);
            views.put(viewType, child);
        }
        return child;
    }

    /**
     * Clears all cached child views, e.g. because a new decorator has been set.
     */
    public void clearCache() {
        if (views != null) {
            views.clear();
            views = null;
        }
    }

}
/**
 * A data binder, which renders a tab's child view into a preview bitmap and displays the
 * bitmap in an image view, optionally asynchronously.
 */
private static class PreviewDataBinder
        extends AbstractDataBinder<Bitmap, Tab, ImageView, TabView> {

    /**
     * The tab switcher, the data binder belongs to.
     */
    private final TabSwitcher tabSwitcher;

    /**
     * The view recycler, which is used to inflate child views.
     */
    private final ChildViewRecycler childViewRecycler;

    /**
     * The view, which is rendered as a preview image.
     */
    private View child;

    /**
     * Creates a new data binder, which allows to render preview images of tabs.
     *
     * @param tabSwitcher
     *         The tab switcher, the data binder belong to, as an instance of the class {@link
     *         TabSwitcher}. The tab switcher may not be null
     * @param childViewRecycler
     *         The view recycler, which should be used to inflate child views, as an instance of
     *         the class {@link ChildViewRecycler}. The view recycler may not be null
     */
    public PreviewDataBinder(@NonNull final TabSwitcher tabSwitcher,
                             @NonNull final ChildViewRecycler childViewRecycler) {
        super(tabSwitcher.getContext());
        this.tabSwitcher = tabSwitcher;
        this.childViewRecycler = childViewRecycler;
    }

    @Override
    protected void onPreExecute(@NonNull final ImageView view,
                                @NonNull final TabView... params) {
        TabView tabView = params[0];
        ViewHolder viewHolder = tabView.viewHolder;
        child = viewHolder.child;
        int viewType = tabSwitcher.getDecorator().getViewType(tabView.tab);
        if (child == null) {
            // No live child is attached; inflate (or reuse) one solely for rendering.
            child = childViewRecycler.inflateView(viewHolder.childContainer, viewType);
            // TODO: Must the view also be added to the parent? This is relevant when calling the showSwitcher-method, while the TabSwitcher is not yet inflated
        } else {
            // Detach the live child from its view holder; the caller removes it from the
            // layout once the preview has been rendered.
            viewHolder.child = null;
        }
        tabSwitcher.getDecorator()
                .onShowTab(getContext(), tabSwitcher, child, tabView.tab, viewType);
    }

    @Nullable
    @Override
    protected Bitmap doInBackground(@NonNull final Tab key, @NonNull final TabView... params) {
        // Render the child view into an ARGB bitmap of the same size.
        // Note: leftover debug code, which drew a red circle onto every preview (flagged by
        // a TODO in a previous revision), has been removed.
        Bitmap bitmap = Bitmap.createBitmap(child.getWidth(), child.getHeight(),
                Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        child.draw(canvas);
        return bitmap;
    }

    @Override
    protected void onPostExecute(@NonNull final ImageView view, @Nullable final Bitmap data,
                                 @NonNull final TabView... params) {
        // Show the rendered preview; hide the image view, if rendering yielded no bitmap.
        view.setImageBitmap(data);
        view.setVisibility(data != null ? View.VISIBLE : View.GONE);
    }

}
/**
 * A view recycler adapter, which inflates, binds and recycles the views, which visualize the
 * tab switcher's tabs.
 */
private class RecyclerAdapter extends ViewRecycler.Adapter<TabView, Integer> {

    // The data binder, which renders preview bitmaps of tabs, possibly asynchronously.
    private final PreviewDataBinder dataBinder;

    // TODO: Only add child view, if tab view is the selected one
    // Attaches the tab's live child view to its container and lets the decorator adapt it.
    // Used while the switcher is hidden, i.e. the tab's content itself is displayed.
    private void addChildView(@NonNull final TabView tabView) {
        ViewHolder viewHolder = tabView.viewHolder;
        View view = viewHolder.child;
        int viewType = getDecorator().getViewType(tabView.tab);
        if (view == null) {
            ViewGroup parent = viewHolder.childContainer;
            view = childViewRecycler.inflateView(parent, viewType);
            LayoutParams layoutParams =
                    new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
            layoutParams.setMargins(getPaddingLeft(), getPaddingTop(), getPaddingRight(),
                    getPaddingBottom());
            // The child is inserted at index 0, below the permanent preview and border views.
            parent.addView(view, 0, layoutParams);
            viewHolder.child = view;
        }
        // While the live child is shown, the static preview and border are hidden.
        viewHolder.previewImageView.setVisibility(View.GONE);
        viewHolder.previewImageView.setImageBitmap(null);
        viewHolder.borderView.setVisibility(View.GONE);
        getDecorator().onShowTab(getContext(), TabSwitcher.this, view, tabView.tab, viewType);
    }

    // Renders the tab's content as a preview bitmap. If a live child view is still attached,
    // the preview is rendered synchronously and the child is removed afterwards.
    private void renderChildView(@NonNull final TabView tabView) {
        ViewHolder viewHolder = tabView.viewHolder;
        viewHolder.borderView.setVisibility(View.VISIBLE);
        boolean async = viewHolder.child == null;
        dataBinder.load(tabView.tab, viewHolder.previewImageView, async, tabView);
        if (!async) {
            removeChildView(viewHolder);
        }
    }

    // Removes the tab's child view from its container, if one is attached. The container
    // always keeps two permanent children (preview image view and border view).
    private void removeChildView(@NonNull final ViewHolder viewHolder) {
        if (viewHolder.childContainer.getChildCount() > 2) {
            viewHolder.childContainer.removeViewAt(0);
        }
    }

    /**
     * Creates a new adapter.
     */
    public RecyclerAdapter() {
        this.dataBinder = new PreviewDataBinder(TabSwitcher.this, childViewRecycler);
    }

    /**
     * Clears all cached preview bitmaps, e.g. because a new decorator has been set.
     */
    public void clearCachedBitmaps() {
        dataBinder.clearCache();
    }

    @NonNull
    @Override
    public View onInflateView(@NonNull final LayoutInflater inflater,
                              @Nullable final ViewGroup parent, @NonNull final TabView tabView,
                              final int viewType, @NonNull final Integer... params) {
        ViewHolder viewHolder = new ViewHolder();
        View view = inflater.inflate(
                isDraggingHorizontally() ? R.layout.tab_view_horizontally : R.layout.tab_view,
                tabContainer, false);
        Drawable backgroundDrawable =
                ContextCompat.getDrawable(getContext(), R.drawable.tab_background);
        ViewUtil.setBackground(view, backgroundDrawable);
        int padding = tabInset + tabBorderWidth;
        view.setPadding(padding, tabInset, padding, padding);
        viewHolder.titleContainer = (ViewGroup) view.findViewById(R.id.tab_title_container);
        viewHolder.titleTextView = (TextView) view.findViewById(R.id.tab_title_text_view);
        viewHolder.closeButton = (ImageButton) view.findViewById(R.id.close_tab_button);
        viewHolder.childContainer = (ViewGroup) view.findViewById(R.id.child_container);
        viewHolder.previewImageView = (ImageView) view.findViewById(R.id.preview_image_view);
        adaptChildAndPreviewMargins(viewHolder);
        viewHolder.borderView = view.findViewById(R.id.border_view);
        Drawable borderDrawable =
                ContextCompat.getDrawable(getContext(), R.drawable.tab_border);
        ViewUtil.setBackground(viewHolder.borderView, borderDrawable);
        LayoutParams layoutParams =
                new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        // Negative margins let the border overlap the tab's inset and title area.
        int borderMargin = -(tabInset + tabBorderWidth);
        layoutParams.leftMargin = borderMargin;
        layoutParams.topMargin = -(tabInset + tabTitleContainerHeight);
        layoutParams.rightMargin = borderMargin;
        // An optional first param overrides the bottom margin (e.g. for the bottom-most tab).
        layoutParams.bottomMargin = params.length > 0 ? params[0] : borderMargin;
        view.setLayoutParams(layoutParams);
        view.setTag(viewHolder);
        tabView.view = view;
        tabView.viewHolder = viewHolder;
        return view;
    }

    @Override
    public void onShowView(@NonNull final Context context, @NonNull final View view,
                           @NonNull final TabView tabView, @NonNull final Integer... params) {
        if (!tabView.isInflated()) {
            tabView.viewHolder = (ViewHolder) view.getTag();
            tabView.view = view;
        }
        Tab tab = tabView.tab;
        ViewHolder viewHolder = (ViewHolder) view.getTag();
        adaptTitle(viewHolder, tab);
        adaptIcon(viewHolder, tab);
        adaptCloseButton(viewHolder, tab);
        adaptColor(view, viewHolder, tab);
        if (!isSwitcherShown()) {
            // Switcher hidden: show the live child view.
            addChildView(tabView);
        } else {
            // Switcher shown: show a rendered preview of the tab's content instead.
            renderChildView(tabView);
        }
    }

    @Override
    public void onRemoveView(@NonNull final View view, @NonNull final TabView tabView) {
        ViewHolder viewHolder = (ViewHolder) view.getTag();
        removeChildView(viewHolder);
        viewHolder.child = null;
    }

}
/**
 * A wrapper, which encapsulates a tab together with its index within the tab switcher, its
 * currently inflated view and view holder (if any) and the tag, which stores its positional
 * state. Two wrappers are considered equal, if they wrap equal tabs.
 */
private class TabView {

    // The index of the wrapped tab.
    private int index;

    // The wrapped tab.
    @NonNull
    private Tab tab;

    // The tag, which stores the tab's positional state. Created lazily, if not present yet.
    @NonNull
    private Tag tag;

    // The inflated view of the tab, or null, if the view is currently not inflated.
    private View view;

    // The view holder of the tab's view, or null, if the view is currently not inflated.
    private ViewHolder viewHolder;

    /**
     * Creates a new wrapper for the tab at a specific index. As a side effect, a tag is
     * created and registered for the tab, if none exists yet.
     *
     * @param index
     *         The index of the tab as an {@link Integer} value. The index must be at least 0
     */
    public TabView(final int index) {
        ensureAtLeast(index, 0, "The index must be at least 0");
        this.index = index;
        this.tab = getTab(index);
        this.view = viewRecycler.getView(this);
        if (view != null) {
            this.viewHolder = (ViewHolder) view.getTag();
        } else {
            this.viewHolder = null;
        }
        this.tag = tags.get(tab);
        if (tag == null) {
            tag = new Tag();
            tags.put(tab, tag);
        }
    }

    /**
     * Returns, whether the tab's view is currently inflated.
     *
     * @return True, if both the view and its view holder are available, false otherwise
     */
    public boolean isInflated() {
        return view != null && viewHolder != null;
    }

    /**
     * Returns, whether the tab is potentially visible, i.e. its state is not one of the
     * hidden states, or it is currently being closed.
     *
     * @return True, if the tab is visible, false otherwise
     */
    public boolean isVisible() {
        return (tag.state != State.TOP_MOST_HIDDEN && tag.state != State.BOTTOM_MOST_HIDDEN) ||
                tag.closing;
    }

    @Override
    public final String toString() {
        return "TabView [index = " + index + "]";
    }

    @Override
    public int hashCode() {
        // Consistent with equals: both delegate to the wrapped tab.
        return tab.hashCode();
    }

    @Override
    public boolean equals(final Object obj) {
        // Fast path: an instance is always equal to itself.
        if (obj == this) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (obj.getClass() != getClass()) {
            return false;
        }
        TabView other = (TabView) obj;
        return tab.equals(other.tab);
    }

}
/**
 * A comparator, which orders {@link TabView} instances by their index in ascending order.
 */
private class TabViewComparator implements Comparator<TabView> {

    @Override
    public int compare(final TabView o1, final TabView o2) {
        // Integer.compare avoids the boxing, which the previous cast to Integer caused.
        return Integer.compare(o1.index, o2.index);
    }

}
/**
 * An iterator, which allows to iterate over the tab switcher's tabs as {@link TabView}
 * wrappers, either in forward or reverse order, optionally starting at a given index and
 * stopping after a given end index. Note, that this class shadows {@link java.util.Iterator}
 * within the enclosing class.
 */
private class Iterator implements java.util.Iterator<TabView> {

    // True, if the tabs are iterated in reverse order (descending indices).
    private boolean reverse;

    // The index of the tab, which will be returned by the next call to next().
    private int index;

    // The index, at which the iteration stops, stored shifted by one so that the given end
    // index itself is still returned, or -1, if the iteration runs to the end.
    private int end;

    // The tab view, which was returned by the most recent call to next().
    private TabView current;

    // The tab view, which was returned before the current one.
    private TabView previous;

    // NOTE(review): due to the assignment order in next(), this holds the tab view, which
    // preceded the start position, if one existed, otherwise the first returned tab view.
    private TabView first;

    public Iterator() {
        this(false);
    }

    public Iterator(final boolean reverse) {
        this(reverse, -1);
    }

    public Iterator(final boolean reverse, final int start) {
        this(reverse, start, -1);
    }

    public Iterator(final boolean reverse, final int start, final int end) {
        this.reverse = reverse;
        this.end = end != -1 ? (reverse ? end - 1 : end + 1) : -1;
        this.previous = null;
        this.index = start != -1 ? start : (reverse ? getCount() - 1 : 0);
        // Pre-populate "current" with the element before the start position, if it exists.
        int previousIndex = reverse ? this.index + 1 : this.index - 1;
        if (previousIndex >= 0 && previousIndex < getCount()) {
            this.current = new TabView(previousIndex);
        } else {
            this.current = null;
        }
    }

    public TabView first() {
        return first;
    }

    public TabView previous() {
        return previous;
    }

    /**
     * Returns the tab view, which the next call to next() would return, without advancing the
     * iteration, or null, if the iteration has ended.
     */
    public TabView peek() {
        if (hasNext()) {
            return new TabView(index);
        }
        return null;
    }

    @Override
    public boolean hasNext() {
        if (index == end) {
            return false;
        } else {
            if (reverse) {
                return index >= 0;
            } else {
                return getCount() - index >= 1;
            }
        }
    }

    @Override
    public TabView next() {
        if (hasNext()) {
            previous = current;
            if (first == null) {
                first = current;
            }
            current = new TabView(index);
            index += reverse ? -1 : 1;
            return current;
        }
        return null;
    }

}
/**
 * A view holder, which references the views, a tab's layout consists of.
 */
private static class ViewHolder {

    // The container, which contains the tab's title area.
    private ViewGroup titleContainer;

    // The text view, which displays the tab's title and (as a compound drawable) its icon.
    private TextView titleTextView;

    // The button, which allows to close the tab.
    private ImageButton closeButton;

    // The container, which contains the tab's child view and preview image view.
    private ViewGroup childContainer;

    // The child view, which displays the tab's content, or null, if it is not attached.
    private View child;

    // The image view, which displays a rendered preview of the tab's content.
    private ImageView previewImageView;

    // The view, which is used to display a border around the tab's preview.
    private View borderView;

}
/**
 * A tag, which stores the position and state of a tab. Tags are cloneable, so a tab's state
 * can be snapshotted.
 */
private static class Tag implements Cloneable {

    // The projected position of the tab on the dragging axis.
    private float projectedPosition;

    // The actual position of the tab on the dragging axis.
    private float actualPosition;

    // The distance between the tab and its predecessor.
    private float distance;

    // The current positional state of the tab.
    private State state;

    // True, if the tab is currently being closed.
    private boolean closing;

    @Override
    public Tag clone() {
        Tag clone;
        try {
            // The cast cannot fail, so catching ClassCastException (as the previous revision
            // did) was dead code and has been removed.
            clone = (Tag) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen, because this class implements Cloneable; fall back to a fresh
            // instance to satisfy the compiler nevertheless.
            clone = new Tag();
        }
        clone.projectedPosition = projectedPosition;
        clone.actualPosition = actualPosition;
        clone.distance = distance;
        clone.state = state;
        clone.closing = closing;
        return clone;
    }

}
/**
 * Contains all possible positional states of a tab within the switcher.
 */
private enum State {

    // The tab is part of the stack at the top.
    STACKED_TOP,

    // The tab is hidden behind the top-most tab.
    TOP_MOST_HIDDEN,

    // The tab is the top-most one.
    TOP_MOST,

    // The tab is fully visible.
    VISIBLE,

    // The tab is hidden behind the bottom-most tab.
    BOTTOM_MOST_HIDDEN,

    // The tab is part of the stack at the bottom.
    STACKED_BOTTOM

}
/**
 * Contains all possible directions of an ongoing drag gesture.
 */
private enum ScrollDirection {

    // No drag gesture is in progress.
    NONE,

    // The tabs are being dragged upwards.
    DRAGGING_UP,

    // The tabs are being dragged downwards.
    DRAGGING_DOWN,

    // The tabs are being dragged beyond the upper boundary.
    OVERSHOOT_UP,

    // The tabs are being dragged beyond the lower boundary.
    OVERSHOOT_DOWN;

}
/**
 * Contains the axes, motion and transformations can be applied on.
 */
private enum Axis {

    // The axis, the tabs are dragged along in order to scroll them.
    DRAGGING_AXIS,

    // The axis, which is orthogonal to the dragging axis (e.g. used to swipe tabs closed).
    ORTHOGONAL_AXIS

}
/**
 * An animation, which scrolls the tabs by a given distance, proportionally to the animation's
 * progress.
 */
private class FlingAnimation extends Animation {

    // The total distance, the tabs should be scrolled by, in pixels.
    private final float flingDistance;

    /**
     * Creates a new fling animation.
     *
     * @param flingDistance
     *         The distance, the tabs should be scrolled by, as a {@link Float} value
     */
    public FlingAnimation(final float flingDistance) {
        this.flingDistance = flingDistance;
    }

    @Override
    protected void applyTransformation(final float interpolatedTime, final Transformation t) {
        // Only apply the drag while the animation is still the active one; dragAnimation may
        // have been reset elsewhere to cancel the fling.
        if (dragAnimation != null) {
            handleDrag(flingDistance * interpolatedTime, 0);
        }
    }

}
// The number of tabs, which are shown in the stacks at the top and bottom.
private static final int STACKED_TAB_COUNT = 3;

// The factor, which is used to dampen dragging beyond certain positions.
private static final float NON_LINEAR_DRAG_FACTOR = 0.5f;

// The maximum rotation angle in degrees, when overshooting at the bottom.
private static final float MAX_DOWN_OVERSHOOT_ANGLE = 3f;

// The maximum rotation angle in degrees, when overshooting at the top.
private static final float MAX_UP_OVERSHOOT_ANGLE = 2f;

// The padding (left, top, right, bottom), which is applied manually via setPadding().
private int[] padding;

// The toolbar, which is shown, when the switcher is shown.
private Toolbar toolbar;

// The container, which contains the views of the tabs.
private ViewGroup tabContainer;

// The listeners, which are notified about the tab switcher's events (insertion-ordered).
private Set<Listener> listeners;

// The layout inflater, which is used to inflate the tab switcher's views.
private LayoutInflater inflater;

// The recycler, which caches the tabs' child views per view type.
private ChildViewRecycler childViewRecycler;

// The recycler, which inflates and recycles the views of the tabs.
private ViewRecycler<TabView, Integer> viewRecycler;

// The adapter, which is used by the view recycler.
private RecyclerAdapter recyclerAdapter;

// The decorator, which visualizes the content of the tabs.
private Decorator decorator;

// Actions, which have been deferred, e.g. until animations have finished.
private Queue<Runnable> pendingActions;

/**
 * A list, which contains the tab switcher's tabs.
 */
private List<Tab> tabs;

// TODO: Only inflated views should be associated with tags. This allows to abandon this map.
private Map<Tab, Tag> tags;

// The index of the currently selected tab, or -1, if no tab is selected.
private int selectedTabIndex;

// The default background color of tabs, which do not specify a color.
private int tabBackgroundColor;

// The distance in pixels, a gesture must travel to be recognized as a drag.
private int dragThreshold;

/**
 * An instance of the class {@link DragHelper}, which is used to recognize drag gestures.
 */
private DragHelper dragHelper;

// Recognizes drag gestures, when overshooting at the top or bottom.
private DragHelper overshootDragHelper;

// Recognizes drag gestures, which are used to swipe tabs closed.
private DragHelper closeDragHelper;

// Measures the velocity of drag gestures in order to detect flings.
private VelocityTracker velocityTracker;

// True, while the tab switcher is shown.
private boolean switcherShown;

// The spacing in pixels between tabs within a stack.
private int stackedTabSpacing;

// The minimum spacing in pixels between neighboring tabs.
private int minTabSpacing;

// The maximum spacing in pixels between neighboring tabs.
private int maxTabSpacing;

// The maximum distance in pixels, the tabs may be overshot beyond their boundaries.
private int maxOvershootDistance;

// The minimum velocity in px/s, which a gesture must reach to be treated as a fling.
private float minFlingVelocity;

// The maximum velocity in px/s, which is used when measuring fling gestures.
private float maxFlingVelocity;

// The minimum velocity in px/s, which a swipe must reach to close a tab.
private float minCloseFlingVelocity;

// The alpha, a tab is faded to, while being swiped closed.
private float closedTabAlpha;

// The scale, a tab is shrunk to, while being swiped closed.
private float closedTabScale;

// The inset in pixels of a tab's view.
private int tabInset;

// The width in pixels of a tab's border.
private int tabBorderWidth;

// The height in pixels of a tab's title container.
private int tabTitleContainerHeight;

// The bottom margin in pixels of a tab's view, or -1, if the default margin is used.
private int tabViewBottomMargin;

// The direction of the currently ongoing drag gesture.
private ScrollDirection scrollDirection;

// The tab view, which is currently being dragged, or null, if none is dragged.
private TabView draggedTabView;

// The index of the tab view, which was attached most recently.
private int lastAttachedIndex;

// The position, at which tab views are attached.
private float attachedPosition;

// The position, beyond which dragging upwards is not possible.
private float topDragThreshold = -Float.MIN_VALUE;

// The position, beyond which dragging downwards is not possible.
private float bottomDragThreshold = Float.MAX_VALUE;

// The id of the pointer, which is currently tracked, or -1, if none is tracked.
private int pointerId = -1;

// The animation, which is used to fling the tabs, or null, if no fling is in progress.
private Animation dragAnimation;

@Deprecated
private ViewPropertyAnimator relocateAnimation;

// The animation, which is used to show or hide the toolbar, or null.
private ViewPropertyAnimator toolbarAnimation;

// The number of animations, which are currently running.
private int runningAnimations;
/**
 * Initializes the view by setting up default values, obtaining dimensions and configuration
 * constants from the resources, inflating the layout and finally applying the given
 * attributes. The initialization order matters: the layout must be inflated before the view
 * recycler is created, because the recycler operates on the tab container.
 *
 * @param attributeSet
 *         The attribute set, which should be used to initialize the view, as an instance of the
 *         type {@link AttributeSet} or null, if no attributes should be obtained
 * @param defaultStyle
 *         The default style to apply to this view. If 0, no style will be applied (beyond what
 *         is included in the theme). This may either be an attribute resource, whose value will
 *         be retrieved from the current theme, or an explicit style resource
 * @param defaultStyleResource
 *         A resource identifier of a style resource that supplies default values for the view,
 *         used only if the default style is 0 or can not be found in the theme. Can be 0 to not
 *         look for defaults
 */
private void initialize(@Nullable final AttributeSet attributeSet,
                        @AttrRes final int defaultStyle,
                        @StyleRes final int defaultStyleResource) {
    // The listener inflates the selected tab's view once the initial layout is done.
    getViewTreeObserver().addOnGlobalLayoutListener(this);
    runningAnimations = 0;
    inflater = LayoutInflater.from(getContext());
    padding = new int[]{0, 0, 0, 0};
    listeners = new LinkedHashSet<>();
    pendingActions = new LinkedList<>();
    tabs = new ArrayList<>();
    tags = new HashMap<>();
    selectedTabIndex = -1;
    switcherShown = false;
    Resources resources = getResources();
    dragThreshold = resources.getDimensionPixelSize(R.dimen.drag_threshold);
    dragHelper = new DragHelper(dragThreshold);
    overshootDragHelper = new DragHelper(0);
    closeDragHelper =
            new DragHelper(resources.getDimensionPixelSize(R.dimen.close_drag_threshold));
    stackedTabSpacing = resources.getDimensionPixelSize(R.dimen.stacked_tab_spacing);
    minTabSpacing = resources.getDimensionPixelSize(R.dimen.min_tab_spacing);
    maxTabSpacing = resources.getDimensionPixelSize(R.dimen.max_tab_spacing);
    maxOvershootDistance = resources.getDimensionPixelSize(R.dimen.max_overshoot_distance);
    ViewConfiguration configuration = ViewConfiguration.get(getContext());
    minFlingVelocity = configuration.getScaledMinimumFlingVelocity();
    maxFlingVelocity = configuration.getScaledMaximumFlingVelocity();
    minCloseFlingVelocity = resources.getDimensionPixelSize(R.dimen.min_close_fling_velocity);
    // Float-valued dimensions must be read via TypedValue.
    TypedValue typedValue = new TypedValue();
    resources.getValue(R.dimen.closed_tab_scale, typedValue, true);
    closedTabScale = typedValue.getFloat();
    resources.getValue(R.dimen.closed_tab_alpha, typedValue, true);
    closedTabAlpha = typedValue.getFloat();
    tabInset = resources.getDimensionPixelSize(R.dimen.tab_inset);
    tabBorderWidth = resources.getDimensionPixelSize(R.dimen.tab_border_width);
    tabTitleContainerHeight =
            resources.getDimensionPixelSize(R.dimen.tab_title_container_height);
    tabViewBottomMargin = -1;
    scrollDirection = ScrollDirection.NONE;
    inflateLayout();
    childViewRecycler = new ChildViewRecycler();
    recyclerAdapter = new RecyclerAdapter();
    // Views are ordered by descending tab index within the container.
    viewRecycler = new ViewRecycler<>(tabContainer, recyclerAdapter, inflater,
            Collections.reverseOrder(new TabViewComparator()));
    obtainStyledAttributes(attributeSet, defaultStyle, defaultStyleResource);
}
/**
 * Inflates the tab switcher's layout, consisting of a toolbar and a container, which holds
 * the views of the tabs.
 */
private void inflateLayout() {
    toolbar = (Toolbar) inflater.inflate(R.layout.tab_switcher_toolbar, this, false);
    // The toolbar is hidden by default; it can be made visible via showToolbar(true).
    toolbar.setVisibility(View.INVISIBLE);
    addView(toolbar, LayoutParams.MATCH_PARENT,
            ThemeUtil.getDimensionPixelSize(getContext(), R.attr.actionBarSize));
    tabContainer = new FrameLayout(getContext());
    addView(tabContainer, LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
}
/**
 * Notifies all listeners, that the tab switcher has been shown.
 */
private void notifyOnSwitcherShown() {
    for (Listener listener : listeners) {
        listener.onSwitcherShown(this);
    }
}
/**
 * Notifies all listeners, that the tab switcher has been hidden.
 */
private void notifyOnSwitcherHidden() {
    for (Listener listener : listeners) {
        listener.onSwitcherHidden(this);
    }
}
/**
 * Notifies all listeners, that the currently selected tab has been changed.
 *
 * @param selectedTabIndex
 *         The index of the currently selected tab, or -1, if no tab is selected
 * @param selectedTab
 *         The currently selected tab, or null, if no tab is selected
 */
private void notifyOnSelectionChanged(final int selectedTabIndex,
                                      @Nullable final Tab selectedTab) {
    for (Listener listener : listeners) {
        listener.onSelectionChanged(this, selectedTabIndex, selectedTab);
    }
}
/**
 * Notifies all listeners, that a tab has been added at a specific index.
 *
 * @param index
 *         The index of the tab, which has been added
 * @param tab
 *         The tab, which has been added. The tab may not be null
 */
private void notifyOnTabAdded(final int index, @NonNull final Tab tab) {
    for (Listener listener : listeners) {
        listener.onTabAdded(this, index, tab);
    }
}
/**
 * Notifies all listeners, that a tab has been removed from a specific index.
 *
 * @param index
 *         The index, the tab has been removed from
 * @param tab
 *         The tab, which has been removed. The tab may not be null
 */
private void notifyOnTabRemoved(final int index, @NonNull final Tab tab) {
    for (Listener listener : listeners) {
        listener.onTabRemoved(this, index, tab);
    }
}
/**
 * Notifies all listeners, that all tabs have been removed.
 */
private void notifyOnAllTabsRemoved() {
    for (Listener listener : listeners) {
        listener.onAllTabsRemoved(this);
    }
}
/**
 * Creates and returns a click listener, which removes a specific tab, when the tab's close
 * button is clicked.
 *
 * @param tab
 *         The tab, which should be removed on click, as an instance of the class {@link Tab}.
 *         The tab may not be null
 * @return The click listener as an instance of the type {@link OnClickListener}
 */
private OnClickListener createCloseButtonClickListener(@NonNull final Tab tab) {
    return new OnClickListener() {

        @Override
        public void onClick(final View v) {
            removeTab(tab);
        }

    };
}
/**
 * Animates a tab view on the orthogonal axis, either towards its closed position (off-screen),
 * or back to its initial position.
 *
 * @param tabView
 *         The tab view, which should be animated, as an instance of the class {@link TabView}
 * @param close
 *         True, if the tab should be moved towards its closed position, false, if it should
 *         be moved back to its initial position
 * @param flingVelocity
 *         The velocity in px/s of the fling, which caused the animation, or 0, if the
 *         animation was not caused by a fling
 * @param startDelay
 *         The start delay of the animation in milliseconds
 * @param listener
 *         The listener, which should be notified about the animation's events, as an instance
 *         of the type {@link AnimatorListener} or null, if no listener should be notified
 */
private void animateClose(@NonNull final TabView tabView, final boolean close,
                          final float flingVelocity, final long startDelay,
                          @Nullable final AnimatorListener listener) {
    View view = tabView.view;
    float scale = getScale(view, true);
    float closedTabPosition = calculateClosedTabPosition();
    float position = getPosition(Axis.ORTHOGONAL_AXIS, view);
    // When closing, move off-screen towards the side the tab has already been dragged to.
    float targetPosition =
            close ? (position < 0 ? -1 * closedTabPosition : closedTabPosition) : 0;
    float distance = Math.abs(targetPosition - position);
    long animationDuration;
    if (flingVelocity >= minCloseFlingVelocity) {
        // Derive the duration from the fling velocity (px/s) and the remaining distance.
        animationDuration = Math.round((distance / flingVelocity) * 1000);
    } else {
        // Otherwise scale the default duration proportionally to the remaining distance.
        animationDuration = Math.round(
                getResources().getInteger(android.R.integer.config_longAnimTime) *
                        (distance / closedTabPosition));
    }
    ViewPropertyAnimator animation = view.animate();
    animation.setInterpolator(new AccelerateDecelerateInterpolator());
    animation.setListener(createAnimationListenerWrapper(listener));
    animation.setDuration(animationDuration);
    animatePosition(Axis.ORTHOGONAL_AXIS, animation, view, targetPosition, true);
    // Closing tabs are additionally shrunk and faded out.
    animateScale(Axis.ORTHOGONAL_AXIS, animation, close ? closedTabScale * scale : scale);
    animateScale(Axis.DRAGGING_AXIS, animation, close ? closedTabScale * scale : scale);
    animation.alpha(close ? closedTabAlpha : 1);
    animation.setStartDelay(startDelay);
    animation.start();
}
    /**
     * Creates an animator listener, which finalizes the removal of a tab once its close
     * animation ended, or restores the tab if the close was aborted. While the animation starts,
     * neighboring tab views are relocated to fill the resulting gap.
     *
     * @param closedTabView
     *         The tab view, which is being closed, as an instance of the class {@link TabView}
     * @param close
     *         True, if the tab should actually be removed, false if the close was aborted
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimatorListener}
     */
    private AnimatorListener createCloseAnimationListener(@NonNull final TabView closedTabView,
                                                          final boolean close) {
        return new AnimatorListenerAdapter() {

            // Shifts the actual positions of the stacked tab views adjacent to the closed tab
            // by one slot each, so the stack stays contiguous after the removal.
            private void adjustActualPositionOfStackedTabViews(final boolean reverse) {
                Iterator iterator = new Iterator(reverse, closedTabView.index);
                TabView tabView;
                Float previousActualPosition = null;
                while ((tabView = iterator.next()) != null) {
                    float actualPosition = tabView.tag.actualPosition;
                    if (previousActualPosition != null) {
                        tabView.tag.actualPosition = previousActualPosition;
                        // NOTE(review): applying the tag of the *closed* tab view inside this
                        // loop looks suspicious; presumably the tag of the current tabView
                        // should be applied instead -- TODO confirm.
                        applyTag(closedTabView);
                    }
                    previousActualPosition = actualPosition;
                }
            }

            // Relocates the stacked tab views above (top == true) or below the removed tab,
            // animating each one into its predecessor's projected position.
            private void relocateWhenStackedTabViewWasRemoved(final boolean top) {
                long startDelay = getResources().getInteger(android.R.integer.config_shortAnimTime);
                int start = closedTabView.index + (top ? -1 : 1);
                Iterator iterator = new Iterator(top, closedTabView.index);
                TabView tabView;
                Float previousProjectedPosition = null;
                // Only walk while the views are part of a stack (or hidden behind one).
                while ((tabView = iterator.next()) != null &&
                        (tabView.tag.state == State.TOP_MOST_HIDDEN ||
                                tabView.tag.state == State.STACKED_TOP ||
                                tabView.tag.state == State.BOTTOM_MOST_HIDDEN ||
                                tabView.tag.state == State.STACKED_BOTTOM)) {
                    float projectedPosition = tabView.tag.projectedPosition;
                    if (previousProjectedPosition != null) {
                        if (tabView.tag.state == State.TOP_MOST_HIDDEN ||
                                tabView.tag.state == State.BOTTOM_MOST_HIDDEN) {
                            // A hidden view becomes part of the stack: it inherits the state of
                            // its predecessor and, at the top, is animated into place.
                            TabView previous = iterator.previous();
                            tabView.tag.state = previous.tag.state;
                            if (top) {
                                tabView.tag.projectedPosition = previousProjectedPosition;
                                long delay = (start + 1 - tabView.index) * startDelay;
                                animateRelocate(tabView, previousProjectedPosition, delay,
                                        createRelocateAnimationListener(tabView, null, true));
                            } else {
                                adaptVisibility(tabView);
                            }
                            break;
                        } else {
                            // A stacked view moves one slot towards the removed tab. "reset" is
                            // true for the last relocation, so pending actions resume afterwards.
                            TabView peek = iterator.peek();
                            State peekState = peek != null ? peek.tag.state : null;
                            boolean reset = !iterator.hasNext() ||
                                    (peekState != State.STACKED_TOP &&
                                            peekState != State.STACKED_BOTTOM);
                            tabView.tag.projectedPosition = previousProjectedPosition;
                            long delay =
                                    (top ? (start + 1 - tabView.index) : (tabView.index - start)) *
                                            startDelay;
                            animateRelocate(tabView, previousProjectedPosition, delay,
                                    createRelocateAnimationListener(tabView, null, reset));
                        }
                    }
                    previousProjectedPosition = projectedPosition;
                }
                adjustActualPositionOfStackedTabViews(!top);
            }

            // Relocates the visible tab views above the removed tab: each one slides into the
            // position (and takes over the tag) of its predecessor.
            private void relocateWhenVisibleTabViewWasRemoved() {
                int start = closedTabView.index - 1;
                if (start >= 0) {
                    long startDelay =
                            getResources().getInteger(android.R.integer.config_shortAnimTime);
                    Iterator iterator = new Iterator(true, start);
                    TabView tabView;
                    int firstStackedTabIndex = -1;
                    // Relocate until the first stacked/hidden view is reached.
                    while ((tabView = iterator.next()) != null && firstStackedTabIndex == -1) {
                        if (tabView.tag.state == State.BOTTOM_MOST_HIDDEN ||
                                tabView.tag.state == State.STACKED_BOTTOM) {
                            firstStackedTabIndex = tabView.index;
                        }
                        TabView previous = iterator.previous();
                        boolean reset = !iterator.hasNext() || firstStackedTabIndex != -1;
                        AnimatorListener listener =
                                createRelocateAnimationListener(tabView, previous.tag, reset);
                        animateRelocate(tabView, previous.tag.projectedPosition,
                                (start + 1 - tabView.index) * startDelay, tabView.index == start ?
                                        createRelocateAnimationListenerWrapper(closedTabView,
                                                listener) : listener);
                    }
                    // The stacked views following the visible ones only shift their actual
                    // positions; they are not animated.
                    if (firstStackedTabIndex != -1) {
                        iterator = new Iterator(true, firstStackedTabIndex);
                        Float previousActualPosition = null;
                        while ((tabView = iterator.next()) != null) {
                            float actualPosition = tabView.tag.actualPosition;
                            if (previousActualPosition != null) {
                                tabView.tag.actualPosition = previousActualPosition;
                            }
                            previousActualPosition = actualPosition;
                        }
                    }
                }
            }

            // Animates a single tab view to the given position on the dragging axis. The
            // animator is stored in the relocateAnimation field so it counts as a running
            // animation until the listener resets it.
            private void animateRelocate(@NonNull final TabView tabView,
                                         final float relocatePosition, final long startDelay,
                                         @Nullable final AnimatorListener listener) {
                View view = tabView.view;
                relocateAnimation = view.animate();
                relocateAnimation.setListener(listener);
                relocateAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
                relocateAnimation.setDuration(
                        getResources().getInteger(android.R.integer.config_mediumAnimTime));
                animatePosition(Axis.DRAGGING_AXIS, relocateAnimation, view, relocatePosition,
                        true);
                relocateAnimation.setStartDelay(startDelay);
                relocateAnimation.start();
            }

            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                // Start relocating the neighbors as soon as the close animation begins; the
                // strategy depends on where the closed tab sits (stack, hidden or visible).
                if (close) {
                    if (closedTabView.tag.state == State.BOTTOM_MOST_HIDDEN) {
                        adjustActualPositionOfStackedTabViews(true);
                    } else if (closedTabView.tag.state == State.TOP_MOST_HIDDEN) {
                        adjustActualPositionOfStackedTabViews(false);
                    } else if (closedTabView.tag.state == State.STACKED_BOTTOM) {
                        relocateWhenStackedTabViewWasRemoved(false);
                    } else if (closedTabView.tag.state == State.STACKED_TOP) {
                        relocateWhenStackedTabViewWasRemoved(true);
                    } else {
                        relocateWhenVisibleTabViewWasRemoved();
                    }
                }
            }

            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                if (close) {
                    // Actually remove the tab from the model and notify listeners.
                    int index = closedTabView.index;
                    viewRecycler.remove(closedTabView);
                    Tab tab = tabs.remove(index);
                    tab.removeCallback(TabSwitcher.this);
                    tags.remove(tab);
                    notifyOnTabRemoved(index, tab);
                    if (isEmpty()) {
                        selectedTabIndex = -1;
                        notifyOnSelectionChanged(-1, null);
                        animateToolbarVisibility(isToolbarShown(), 0);
                    } else if (selectedTabIndex == closedTabView.index) {
                        // The selected tab was closed; fall back to its predecessor, if any.
                        if (selectedTabIndex > 0) {
                            selectedTabIndex--;
                        }
                        notifyOnSelectionChanged(selectedTabIndex, getTab(selectedTabIndex));
                    }
                } else {
                    // The close was aborted: restore the tab's state and pivot.
                    View view = closedTabView.view;
                    adaptTopMostTabViewWhenClosingAborted(closedTabView, closedTabView.index + 1);
                    closedTabView.tag.closing = false;
                    setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
                    handleRelease(null);
                    animateToolbarVisibility(true, 0);
                }
                draggedTabView = null;
            }

        };
    }
private float getDefaultPivot(@NonNull final Axis axis, @NonNull final View view) {
if (axis == Axis.DRAGGING_AXIS) {
return isDraggingHorizontally() ? getSize(axis, view) / 2f : 0;
} else {
return isDraggingHorizontally() ? 0 : getSize(axis, view) / 2f;
}
}
private float getPivotWhenClosing(@NonNull final Axis axis, @NonNull final View view) {
if (axis == Axis.DRAGGING_AXIS) {
return maxTabSpacing;
} else {
return getDefaultPivot(axis, view);
}
}
private float getPivotOnOvershootDown(@NonNull final Axis axis, @NonNull final View view) {
if (axis == Axis.DRAGGING_AXIS) {
return maxTabSpacing;
} else {
return getSize(axis, view) / 2f;
}
}
private float getPivotOnOvershootUp(@NonNull final Axis axis, @NonNull final View view) {
return getSize(axis, view) / 2f;
}
    /**
     * Creates a wrapper around an animator listener, which additionally restores the top-most
     * tab view once the relocate animation ended (used when a close was aborted).
     *
     * @param closedTabView
     *         The tab view, whose close was aborted, as an instance of the class {@link TabView}
     * @param listener
     *         The listener, which should be wrapped, or null, if no listener should be notified
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimatorListener}
     */
    private AnimatorListener createRelocateAnimationListenerWrapper(
            @NonNull final TabView closedTabView, @Nullable final AnimatorListener listener) {
        return new AnimatorListenerAdapter() {

            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                if (listener != null) {
                    listener.onAnimationStart(animation);
                }
            }

            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                adaptTopMostTabViewWhenClosingAborted(closedTabView, closedTabView.index);
                if (listener != null) {
                    listener.onAnimationEnd(animation);
                }
            }

        };
    }
    /**
     * Creates an animator listener, which makes a relocated tab view visible when the animation
     * starts and applies its (optionally replaced) tag when it ends.
     *
     * @param tabView
     *         The tab view, which is relocated, as an instance of the class {@link TabView}
     * @param tag
     *         The tag, which should be assigned to the tab view after the relocation, or null,
     *         if the view's current tag should be kept
     * @param reset
     *         True, if this is the last relocation, so the relocate animation is cleared and
     *         pending actions are resumed
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimatorListener}
     */
    private AnimatorListener createRelocateAnimationListener(@NonNull final TabView tabView,
                                                             @Nullable final Tag tag,
                                                             final boolean reset) {
        return new AnimatorListenerAdapter() {

            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                View view = tabView.view;
                view.setVisibility(View.VISIBLE);
            }

            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                if (tag != null) {
                    tags.put(tabView.tab, tag);
                    tabView.tag = tag;
                }
                applyTag(tabView);
                if (reset) {
                    // Clearing the field marks the animation as finished, which allows the next
                    // pending action to run.
                    relocateAnimation = null;
                    executePendingAction();
                }
            }

        };
    }
    /**
     * Obtains all attributes from a specific attribute set.
     *
     * @param attributeSet
     *         The attribute set, the attributes should be obtained from, as an instance of the type
     *         {@link AttributeSet} or null, if no attributes should be obtained
     * @param defaultStyle
     *         The default style to apply to this view. If 0, no style will be applied (beyond what
     *         is included in the theme). This may either be an attribute resource, whose value will
     *         be retrieved from the current theme, or an explicit style resource
     * @param defaultStyleResource
     *         A resource identifier of a style resource that supplies default values for the view,
     *         used only if the default style is 0 or can not be found in the theme. Can be 0 to not
     *         look for defaults
     */
    private void obtainStyledAttributes(@Nullable final AttributeSet attributeSet,
                                        @AttrRes final int defaultStyle,
                                        @StyleRes final int defaultStyleResource) {
        TypedArray typedArray = getContext()
                .obtainStyledAttributes(attributeSet, R.styleable.TabSwitcher, defaultStyle,
                        defaultStyleResource);
        try {
            obtainBackground(typedArray);
            obtainTabBackgroundColor(typedArray);
        } finally {
            // The typed array must always be recycled, even if obtaining an attribute fails.
            typedArray.recycle();
        }
    }
    /**
     * Obtains the view's background from a specific typed array.
     *
     * @param typedArray
     *         The typed array, the background should be obtained from, as an instance of the class
     *         {@link TypedArray}. The typed array may not be null
     */
    private void obtainBackground(@NonNull final TypedArray typedArray) {
        int resourceId = typedArray.getResourceId(R.styleable.TabSwitcher_android_background, 0);
        if (resourceId != 0) {
            // A drawable resource was given as the background.
            ViewUtil.setBackground(this, ContextCompat.getDrawable(getContext(), resourceId));
        } else {
            // Otherwise fall back to a color, defaulting to the theme's background color.
            int defaultValue =
                    ContextCompat.getColor(getContext(), R.color.tab_switcher_background_color);
            int color =
                    typedArray.getColor(R.styleable.TabSwitcher_android_background, defaultValue);
            setBackgroundColor(color);
        }
    }
/**
* Obtains the background color of tabs from a specific typed array.
*
* @param typedArray
* The typed array, the background color should be obtained from, as an instance of the
* class {@link TypedArray}. The typed array may not be null
*/
private void obtainTabBackgroundColor(@NonNull final TypedArray typedArray) {
int defaultValue = ContextCompat.getColor(getContext(), R.color.tab_background_color);
tabBackgroundColor =
typedArray.getColor(R.styleable.TabSwitcher_tabBackgroundColor, defaultValue);
}
private int getPadding(@NonNull final Axis axis, final int gravity) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return gravity == Gravity.START ? getPaddingTop() : getPaddingBottom();
} else {
return gravity == Gravity.START ? getPaddingLeft() : getPaddingRight();
}
}
private Axis getOrientationInvariantAxis(@NonNull final Axis axis) {
if (isDraggingHorizontally()) {
return axis == Axis.DRAGGING_AXIS ? Axis.ORTHOGONAL_AXIS : Axis.DRAGGING_AXIS;
}
return axis;
}
private boolean isDraggingHorizontally() {
return getOrientation(getContext()) == Orientation.LANDSCAPE;
}
    /**
     * Returns the scale factor, which shrinks a tab view's width to fit between the switcher's
     * margins, optionally subtracting the switcher's horizontal padding and, in landscape, the
     * space reserved for stacked tabs.
     */
    private float getScale(@NonNull final View view, final boolean includePadding) {
        LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
        float width = view.getWidth();
        float targetWidth = width + layoutParams.leftMargin + layoutParams.rightMargin -
                (includePadding ? getPaddingLeft() + getPaddingRight() : 0) -
                (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing : 0);
        return targetWidth / width;
    }
    /**
     * Returns the scaled size of a view on a specific axis, not taking the switcher's padding
     * into account.
     */
    private float getSize(@NonNull final Axis axis, @NonNull final View view) {
        return getSize(axis, view, false);
    }
private float getSize(@NonNull final Axis axis, @NonNull final View view,
final boolean includePadding) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return view.getHeight() * getScale(view, includePadding);
} else {
return view.getWidth() * getScale(view, includePadding);
}
}
private float getPosition(@NonNull final Axis axis, @NonNull final MotionEvent event) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return event.getY();
} else {
return event.getX();
}
}
    /**
     * Returns the logical position of a view on the given axis, compensating for the toolbar
     * offset, the switcher's padding, margins and the space reserved for stacked tabs. Inverse
     * of {@link #setPosition}.
     */
    private float getPosition(@NonNull final Axis axis, @NonNull final View view) {
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            // Subtract the toolbar offset (when shown) and the start padding.
            return view.getY() -
                    (isToolbarShown() && isSwitcherShown() ? toolbar.getHeight() - tabInset : 0) -
                    getPadding(axis, Gravity.START);
        } else {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            return view.getX() - layoutParams.leftMargin - getPaddingLeft() / 2f +
                    getPaddingRight() / 2f +
                    (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing / 2f : 0);
        }
    }
    /**
     * Sets the logical position of a view on the given axis, applying the same offsets that
     * {@link #getPosition(Axis, View)} removes.
     */
    private void setPosition(@NonNull final Axis axis, @NonNull final View view,
                             final float position) {
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            view.setY((isToolbarShown() && isSwitcherShown() ? toolbar.getHeight() - tabInset : 0) +
                    getPadding(axis, Gravity.START) + position);
        } else {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            view.setX(position + layoutParams.leftMargin + getPaddingLeft() / 2f -
                    getPaddingRight() / 2f -
                    (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing / 2f : 0));
        }
    }
    /**
     * Animates a view to the given logical position on the given axis, applying the same
     * offsets as {@link #setPosition}. The padding offset is only applied when
     * {@code includePadding} is true.
     */
    private void animatePosition(@NonNull final Axis axis,
                                 @NonNull final ViewPropertyAnimator animator,
                                 @NonNull final View view, final float position,
                                 final boolean includePadding) {
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            animator.y(
                    (isToolbarShown() && isSwitcherShown() ? toolbar.getHeight() - tabInset : 0) +
                            (includePadding ? getPadding(axis, Gravity.START) : 0) + position);
        } else {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            animator.x(position + layoutParams.leftMargin +
                    (includePadding ? getPaddingLeft() / 2f - getPaddingRight() / 2f : 0) -
                    (isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing / 2f : 0));
        }
    }
private float getRotation(@NonNull final Axis axis, @NonNull final View view) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
return view.getRotationY();
} else {
return view.getRotationX();
}
}
private void setRotation(@NonNull final Axis axis, @NonNull final View view,
final float angle) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
view.setRotationY(isDraggingHorizontally() ? -1 * angle : angle);
} else {
view.setRotationX(isDraggingHorizontally() ? -1 * angle : angle);
}
}
private void animateRotation(@NonNull final Axis axis,
@NonNull final ViewPropertyAnimator animator, final float angle) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
animator.rotationY(isDraggingHorizontally() ? -1 * angle : angle);
} else {
animator.rotationX(isDraggingHorizontally() ? -1 * angle : angle);
}
}
private void setScale(@NonNull final Axis axis, @NonNull final View view, final float scale) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
view.setScaleY(scale);
} else {
view.setScaleX(scale);
}
}
private void animateScale(@NonNull final Axis axis,
@NonNull final ViewPropertyAnimator animator, final float scale) {
if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
animator.scaleY(scale);
} else {
animator.scaleX(scale);
}
}
    /**
     * Sets the pivot of a view on the given axis. The view's translation is adjusted so that the
     * visual position of the (scaled) view does not jump when the pivot moves.
     */
    private void setPivot(@NonNull final Axis axis, @NonNull final View view, final float pivot) {
        LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
        if (getOrientationInvariantAxis(axis) == Axis.DRAGGING_AXIS) {
            float newPivot = pivot - layoutParams.topMargin - tabTitleContainerHeight;
            // Compensate the pivot change: shifting the pivot of a scaled view would otherwise
            // visibly move it.
            view.setTranslationY(view.getTranslationY() +
                    (view.getPivotY() - newPivot) * (1 - view.getScaleY()));
            view.setPivotY(newPivot);
        } else {
            float newPivot = pivot - layoutParams.leftMargin;
            view.setTranslationX(view.getTranslationX() +
                    (view.getPivotX() - newPivot) * (1 - view.getScaleX()));
            view.setPivotX(newPivot);
        }
    }
    /**
     * Creates a new tab switcher.
     *
     * @param context
     *         The context, which should be used by the view, as an instance of the class {@link
     *         Context}. The context may not be null
     */
    public TabSwitcher(@NonNull final Context context) {
        this(context, null);
    }
    /**
     * Creates a new tab switcher from an XML attribute set.
     *
     * @param context
     *         The context, which should be used by the view, as an instance of the class {@link
     *         Context}. The context may not be null
     * @param attributeSet
     *         The attribute set, the view's attributes should be obtained from, or null
     */
    public TabSwitcher(@NonNull final Context context, @Nullable final AttributeSet attributeSet) {
        super(context, attributeSet);
        initialize(attributeSet, 0, 0);
    }
    /**
     * Creates a new tab switcher from an XML attribute set and a default style.
     *
     * @param context
     *         The context, which should be used by the view, as an instance of the class {@link
     *         Context}. The context may not be null
     * @param attributeSet
     *         The attribute set, the view's attributes should be obtained from, or null
     * @param defaultStyle
     *         The default style to apply to this view. If 0, no style will be applied
     */
    public TabSwitcher(@NonNull final Context context, @Nullable final AttributeSet attributeSet,
                       @AttrRes final int defaultStyle) {
        super(context, attributeSet, defaultStyle);
        initialize(attributeSet, defaultStyle, 0);
    }
    /**
     * Creates a new tab switcher from an XML attribute set, a default style and a default style
     * resource. Only available on API level 21 (Lollipop) and above.
     *
     * @param context
     *         The context, which should be used by the view, as an instance of the class {@link
     *         Context}. The context may not be null
     * @param attributeSet
     *         The attribute set, the view's attributes should be obtained from, or null
     * @param defaultStyle
     *         The default style to apply to this view. If 0, no style will be applied
     * @param defaultStyleResource
     *         A resource identifier of a style resource that supplies default values, or 0
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public TabSwitcher(@NonNull final Context context, @Nullable final AttributeSet attributeSet,
                       @AttrRes final int defaultStyle, @StyleRes final int defaultStyleResource) {
        super(context, attributeSet, defaultStyle, defaultStyleResource);
        initialize(attributeSet, defaultStyle, defaultStyleResource);
    }
    /**
     * Adds a tab at the end of the switcher.
     *
     * @param tab
     *         The tab, which should be added, as an instance of the class {@link Tab}. The tab
     *         may not be null
     */
    public final void addTab(@NonNull final Tab tab) {
        addTab(tab, getCount());
    }
    /**
     * Adds a tab at a specific index, using the default animation.
     *
     * @param tab
     *         The tab, which should be added, as an instance of the class {@link Tab}. The tab
     *         may not be null
     * @param index
     *         The index, the tab should be added at, as an {@link Integer} value
     */
    public final void addTab(@NonNull final Tab tab, final int index) {
        addTab(tab, index, AnimationType.SWIPE_RIGHT);
    }
    // TODO: Add support for adding tab, while switcher is shown
    /**
     * Adds a tab at a specific index, using a specific animation. The addition is enqueued as a
     * pending action, so it is deferred while an animation is running.
     *
     * @param tab
     *         The tab, which should be added, as an instance of the class {@link Tab}. The tab
     *         may not be null
     * @param index
     *         The index, the tab should be added at, as an {@link Integer} value
     * @param animationType
     *         The animation, which should be used, as a value of the enum {@link AnimationType}.
     *         The animation type may not be null
     */
    public final void addTab(@NonNull final Tab tab, final int index,
                             @NonNull final AnimationType animationType) {
        ensureNotNull(tab, "The tab may not be null");
        ensureNotNull(animationType, "The animation type may not be null");
        enqueuePendingAction(new Runnable() {

            @Override
            public void run() {
                tabs.add(index, tab);
                tab.addCallback(TabSwitcher.this);
                notifyOnTabAdded(index, tab);
                // The very first tab becomes the selection automatically.
                if (getCount() == 1) {
                    selectedTabIndex = 0;
                    notifyOnSelectionChanged(0, tab);
                }
                if (!isSwitcherShown()) {
                    toolbar.setAlpha(0);
                    // Only the selected tab's view needs to be inflated while the switcher is
                    // hidden.
                    if (selectedTabIndex == index && ViewCompat.isLaidOut(TabSwitcher.this)) {
                        viewRecycler.inflate(new TabView(index));
                    }
                } else {
                    // Wait for the layout pass before starting the add animation.
                    TabView tabView = new TabView(index);
                    tabView.view.getViewTreeObserver().addOnGlobalLayoutListener(
                            createAddTabViewLayoutListener(tabView, animationType));
                }
            }

        });
    }
    /**
     * Creates a layout listener, which positions a newly added tab view off-screen (on the side
     * given by the animation type) once it has been laid out, and then animates it in by
     * reverting a close animation.
     *
     * @param tabView
     *         The tab view, which has been added, as an instance of the class {@link TabView}
     * @param animationType
     *         The animation, which should be used, as a value of the enum {@link AnimationType}
     * @return The listener, which has been created, as an instance of the type {@link
     * OnGlobalLayoutListener}
     */
    private OnGlobalLayoutListener createAddTabViewLayoutListener(@NonNull final TabView tabView,
                                                                  @NonNull final AnimationType animationType) {
        return new OnGlobalLayoutListener() {

            @SuppressWarnings("deprecation")
            @Override
            public void onGlobalLayout() {
                View view = tabView.view;
                // The listener must only fire once.
                ViewUtil.removeOnGlobalLayoutListener(view.getViewTreeObserver(), this);
                view.setVisibility(View.VISIBLE);
                view.setAlpha(closedTabAlpha);
                float closedPosition = calculateClosedTabPosition();
                float dragPosition = getPosition(Axis.DRAGGING_AXIS,
                        tabContainer.getChildAt(getChildIndex(tabView.index)));
                float scale = getScale(view, true);
                setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
                setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
                // Start off-screen on the side given by the animation type.
                setPosition(Axis.ORTHOGONAL_AXIS, view,
                        animationType == AnimationType.SWIPE_LEFT ? -1 * closedPosition :
                                closedPosition);
                setPosition(Axis.DRAGGING_AXIS, view, dragPosition);
                setScale(Axis.ORTHOGONAL_AXIS, view, scale);
                setScale(Axis.DRAGGING_AXIS, view, scale);
                setPivot(Axis.DRAGGING_AXIS, view, getPivotWhenClosing(Axis.DRAGGING_AXIS, view));
                setPivot(Axis.ORTHOGONAL_AXIS, view,
                        getPivotWhenClosing(Axis.ORTHOGONAL_AXIS, view));
                setScale(Axis.ORTHOGONAL_AXIS, view, closedTabScale * scale);
                setScale(Axis.DRAGGING_AXIS, view, closedTabScale * scale);
                // Animate the tab in by running the close animation in reverse (close == false).
                animateClose(tabView, false, 0, 0, createAddAnimationListener(tabView));
            }

        };
    }
    /**
     * Creates an animator listener, which applies the tab's tag once the add animation ended.
     *
     * @param tabView
     *         The tab view, which has been added, as an instance of the class {@link TabView}
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimatorListener}
     */
    private AnimatorListener createAddAnimationListener(@NonNull final TabView tabView) {
        return new AnimatorListenerAdapter() {

            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                applyTag(tabView);
            }

        };
    }
public final void removeTab(@NonNull final Tab tab) {
ensureNotNull(tab, "The tab may not be null");
enqueuePendingAction(new Runnable() {
@Override
public void run() {
int index = indexOfOrThrowException(tab);
TabView tabView = new TabView(index);
if (!isSwitcherShown()) {
viewRecycler.remove(tabView);
Tab tab = tabs.remove(index);
tab.removeCallback(TabSwitcher.this);
tags.remove(tab);
notifyOnTabRemoved(index, tab);
if (isEmpty()) {
selectedTabIndex = -1;
notifyOnSelectionChanged(-1, null);
toolbar.setAlpha(isToolbarShown() ? 1 : 0);
} else if (selectedTabIndex == index) {
if (selectedTabIndex > 0) {
selectedTabIndex--;
}
viewRecycler.inflate(new TabView(selectedTabIndex));
notifyOnSelectionChanged(selectedTabIndex, getTab(selectedTabIndex));
}
} else {
adaptTopMostTabViewWhenClosing(tabView, tabView.index + 1);
tabView.tag.closing = true;
View view = tabView.view;
setPivot(Axis.DRAGGING_AXIS, view,
getPivotWhenClosing(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getPivotWhenClosing(Axis.ORTHOGONAL_AXIS, view));
animateClose(tabView, true, 0, 0, createCloseAnimationListener(tabView, true));
}
}
});
}
    /**
     * Removes all tabs from the switcher. The removal is enqueued as a pending action, so it is
     * deferred while an animation is running. When the switcher is shown, the tabs are swiped
     * away one after another with a staggered delay.
     */
    public final void clear() {
        enqueuePendingAction(new Runnable() {

            @Override
            public void run() {
                if (!isSwitcherShown()) {
                    // Remove from the end to avoid shifting indices while iterating.
                    for (int i = tabs.size() - 1; i >= 0; i--) {
                        Tab tab = tabs.remove(i);
                        tab.removeCallback(TabSwitcher.this);
                    }
                    selectedTabIndex = -1;
                    viewRecycler.removeAll();
                    notifyOnSelectionChanged(-1, null);
                    notifyOnAllTabsRemoved();
                    toolbar.setAlpha(isToolbarShown() ? 1 : 0);
                } else {
                    Iterator iterator = new Iterator(true);
                    TabView tabView;
                    int startDelay = 0;
                    while ((tabView = iterator.next()) != null) {
                        TabView previous = iterator.previous();
                        // Only stagger the delay across visible tabs.
                        if (tabView.tag.state == State.VISIBLE ||
                                previous != null && previous.tag.state == State.VISIBLE) {
                            startDelay += getResources()
                                    .getInteger(android.R.integer.config_shortAnimTime);
                        }
                        if (tabView.isInflated()) {
                            // The last animation carries the listener, which finalizes the clear.
                            animateClose(tabView, true, 0, startDelay,
                                    !iterator.hasNext() ? createClearAnimationListener() : null);
                        }
                    }
                }
            }

        });
    }
    /**
     * Fades the toolbar in or out. A previously running toolbar animation is cancelled first;
     * nothing happens if the toolbar already has the target alpha.
     *
     * @param visible
     *         True, if the toolbar should be faded in, false otherwise
     * @param startDelay
     *         The delay before the animation starts, in milliseconds
     */
    private void animateToolbarVisibility(final boolean visible, final long startDelay) {
        if (toolbarAnimation != null) {
            toolbarAnimation.cancel();
        }
        float targetAlpha = visible ? 1 : 0;
        if (toolbar.getAlpha() != targetAlpha) {
            toolbarAnimation = toolbar.animate();
            toolbarAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
            toolbarAnimation.setDuration(
                    getResources().getInteger(android.R.integer.config_mediumAnimTime));
            toolbarAnimation.setStartDelay(startDelay);
            toolbarAnimation.alpha(targetAlpha);
            toolbarAnimation.start();
        }
    }
private AnimatorListener createClearAnimationListener() {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
for (int i = tabs.size() - 1; i >= 0; i--) {
Tab tab = tabs.remove(i);
tab.removeCallback(TabSwitcher.this);
}
selectedTabIndex = -1;
notifyOnAllTabsRemoved();
notifyOnSelectionChanged(-1, null);
animateToolbarVisibility(isToolbarShown(), 0);
}
};
}
    /**
     * Selects a specific tab. The selection is enqueued as a pending action, so it is deferred
     * while an animation is running. When the switcher is shown, selecting a tab hides the
     * switcher.
     *
     * @param tab
     *         The tab, which should be selected, as an instance of the class {@link Tab}. The
     *         tab may not be null
     */
    public final void selectTab(@NonNull final Tab tab) {
        ensureNotNull(tab, "The tab may not be null");
        enqueuePendingAction(new Runnable() {

            @Override
            public void run() {
                int index = indexOfOrThrowException(tab);
                if (!isSwitcherShown()) {
                    // Swap the inflated view of the old selection for the new one.
                    viewRecycler.remove(new TabView(selectedTabIndex));
                    viewRecycler.inflate(new TabView(index));
                    selectedTabIndex = index;
                    notifyOnSelectionChanged(index, tab);
                } else {
                    selectedTabIndex = index;
                    hideSwitcher();
                }
            }

        });
    }
    /**
     * Translates a tab index into the index of the corresponding child of the tab container.
     * The container keeps its children in reverse order, hence the inversion.
     *
     * @deprecated Relies on the container's child order
     */
    @Deprecated
    private int getChildIndex(final int index) {
        return getCount() - (index + 1);
    }
    /**
     * Enqueues an action and immediately tries to execute it. If an animation is currently
     * running, the action stays queued until the animation finishes.
     */
    private void enqueuePendingAction(@NonNull final Runnable action) {
        pendingActions.add(action);
        executePendingAction();
    }
private void executePendingAction() {
if (!isAnimationRunning()) {
final Runnable action = pendingActions.poll();
if (action != null) {
new Runnable() {
@Override
public void run() {
action.run();
executePendingAction();
}
}.run();
}
}
}
@Nullable
public final Tab getSelectedTab() {
return selectedTabIndex != -1 ? getTab(selectedTabIndex) : null;
}
    /**
     * Returns the index of the currently selected tab, or -1, if no tab is selected.
     */
    public final int getSelectedTabIndex() {
        return selectedTabIndex;
    }
public final boolean isEmpty() {
return getCount() == 0;
}
    /**
     * Returns the number of tabs, which are contained by the switcher.
     */
    public final int getCount() {
        return tabs.size();
    }
    /**
     * Returns the tab at a specific index.
     *
     * @param index
     *         The index of the tab, which should be returned, as an {@link Integer} value
     */
    public final Tab getTab(final int index) {
        return tabs.get(index);
    }
    /**
     * Returns the index of a specific tab, or -1, if the tab is not contained by the switcher.
     *
     * @param tab
     *         The tab, whose index should be returned, as an instance of the class {@link Tab}.
     *         The tab may not be null
     */
    public final int indexOf(@NonNull final Tab tab) {
        ensureNotNull(tab, "The tab may not be null");
        return tabs.indexOf(tab);
    }
private int indexOfOrThrowException(@NonNull final Tab tab) {
int index = indexOf(tab);
if (index == -1) {
throw new NoSuchElementException("No such tab: " + tab);
}
return index;
}
    /**
     * Returns whether the tab switcher is currently shown.
     */
    public final boolean isSwitcherShown() {
        return switcherShown;
    }
    /**
     * Calculates the bottom margin, which must be applied to a tab view while the switcher is
     * shown, so the scaled tab, the toolbar and the stacked-tab area fit into the container.
     */
    private int calculateTabViewBottomMargin(@NonNull final View view) {
        Axis axis = isDraggingHorizontally() ? Axis.ORTHOGONAL_AXIS : Axis.DRAGGING_AXIS;
        float tabHeight = (view.getHeight() - 2 * tabInset) * getScale(view, true);
        float totalHeight = getSize(axis, tabContainer);
        int toolbarHeight = isToolbarShown() ? toolbar.getHeight() - tabInset : 0;
        int stackHeight = isDraggingHorizontally() ? 0 : STACKED_TAB_COUNT * stackedTabSpacing;
        return Math.round(tabHeight + tabInset + toolbarHeight + stackHeight -
                (totalHeight - getPaddingTop() - getPaddingBottom()));
    }
    /**
     * Creates a layout listener, which starts the show-switcher animation for a tab view once
     * it has been laid out. The listener unregisters itself after the first invocation.
     */
    private OnGlobalLayoutListener createShowSwitcherLayoutListener(
            @NonNull final TabView tabView) {
        return new OnGlobalLayoutListener() {

            @Override
            public void onGlobalLayout() {
                ViewUtil.removeOnGlobalLayoutListener(tabView.view.getViewTreeObserver(), this);
                animateShowSwitcher(tabView);
            }

        };
    }
    /**
     * Creates a layout listener, which adapts a freshly inflated tab view's size and applies its
     * tag once it has been laid out. The listener unregisters itself after the first invocation.
     */
    private OnGlobalLayoutListener createInflateTabViewLayoutListener(
            @NonNull final TabView tabView) {
        return new OnGlobalLayoutListener() {

            @Override
            public void onGlobalLayout() {
                View view = tabView.view;
                ViewUtil.removeOnGlobalLayoutListener(view.getViewTreeObserver(), this);
                adaptTabViewSize(tabView);
                applyTag(tabView);
            }

        };
    }
    /**
     * Adapts the size of a tab view by resetting its pivots to the defaults and applying the
     * scale required to fit between the switcher's margins.
     */
    private void adaptTabViewSize(@NonNull final TabView tabView) {
        View view = tabView.view;
        setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
        setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
        float scale = getScale(view, true);
        setScale(Axis.DRAGGING_AXIS, view, scale);
        setScale(Axis.ORTHOGONAL_AXIS, view, scale);
    }
    /**
     * Animates a tab view into its switcher position: tabs before the selection start below the
     * container, tabs after it start at the top, and all of them move to their projected
     * positions while scaling down and acquiring the switcher's bottom margin.
     */
    private void animateShowSwitcher(@NonNull final TabView tabView) {
        View view = tabView.view;
        setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
        setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
        float scale = getScale(view, true);
        // Choose the starting position relative to the selected tab.
        if (tabView.index < selectedTabIndex) {
            setPosition(Axis.DRAGGING_AXIS, view, getSize(Axis.DRAGGING_AXIS, tabContainer));
        } else if (tabView.index > selectedTabIndex) {
            LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
            setPosition(Axis.DRAGGING_AXIS, view,
                    isDraggingHorizontally() ? 0 : layoutParams.topMargin);
        }
        // Lazily compute the shared bottom margin on the first animated tab.
        if (tabViewBottomMargin == -1) {
            tabViewBottomMargin = calculateTabViewBottomMargin(view);
        }
        long animationDuration = getResources().getInteger(android.R.integer.config_longAnimTime);
        animateMargin(view, calculateTabViewBottomMargin(view), animationDuration);
        ViewPropertyAnimator animation = view.animate();
        animation.setDuration(animationDuration);
        animation.setInterpolator(new AccelerateDecelerateInterpolator());
        animation.setListener(
                createAnimationListenerWrapper(createShowSwitcherAnimationListener(tabView)));
        animateScale(Axis.DRAGGING_AXIS, animation, scale);
        animateScale(Axis.ORTHOGONAL_AXIS, animation, scale);
        animatePosition(Axis.DRAGGING_AXIS, animation, view, tabView.tag.projectedPosition, true);
        animatePosition(Axis.ORTHOGONAL_AXIS, animation, view, 0, true);
        animation.setStartDelay(0);
        animation.start();
        animateToolbarVisibility(isToolbarShown(),
                getResources().getInteger(android.R.integer.config_shortAnimTime));
    }
    /**
     * Animates a tab view out of its switcher position back to full-screen: tabs before the
     * selection leave at the bottom, tabs after it at the top, while the view scales back to 1
     * and loses the switcher's bottom margin.
     */
    private void animateHideSwitcher(@NonNull final TabView tabView) {
        View view = tabView.view;
        long animationDuration = getResources().getInteger(android.R.integer.config_longAnimTime);
        animateMargin(view, -(tabInset + tabBorderWidth), animationDuration);
        ViewPropertyAnimator animation = view.animate();
        animation.setDuration(animationDuration);
        animation.setInterpolator(new AccelerateDecelerateInterpolator());
        animation.setListener(
                createAnimationListenerWrapper(createHideSwitcherAnimationListener(tabView)));
        animateScale(Axis.DRAGGING_AXIS, animation, 1);
        animateScale(Axis.ORTHOGONAL_AXIS, animation, 1);
        LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
        animatePosition(Axis.ORTHOGONAL_AXIS, animation, view,
                isDraggingHorizontally() ? layoutParams.topMargin : 0, false);
        // Choose the target position relative to the selected tab.
        if (tabView.index < selectedTabIndex) {
            animatePosition(Axis.DRAGGING_AXIS, animation, view, getSize(Axis.DRAGGING_AXIS, this),
                    false);
        } else if (tabView.index > selectedTabIndex) {
            animatePosition(Axis.DRAGGING_AXIS, animation, view,
                    isDraggingHorizontally() ? 0 : layoutParams.topMargin, false);
        } else {
            // The selected tab stays visible and moves into the full-screen position.
            view.setVisibility(View.VISIBLE);
            animatePosition(Axis.DRAGGING_AXIS, animation, view,
                    isDraggingHorizontally() ? 0 : layoutParams.topMargin, false);
        }
        animation.setStartDelay(0);
        animation.start();
        animateToolbarVisibility(isToolbarShown() && isEmpty(), 0);
    }
    /**
     * Creates a wrapper around an animator listener, which maintains the counter of running
     * animations. Pending actions are resumed once the last running animation ended or was
     * cancelled.
     *
     * @param listener
     *         The listener, which should be wrapped, or null, if no listener should be notified
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimatorListener}
     */
    private AnimatorListener createAnimationListenerWrapper(
            @Nullable final AnimatorListener listener) {
        return new AnimatorListenerAdapter() {

            // Decrements the counter and resumes pending actions when it reaches zero.
            private void endAnimation() {
                if (--runningAnimations == 0) {
                    executePendingAction();
                }
            }

            @Override
            public void onAnimationStart(final Animator animation) {
                super.onAnimationStart(animation);
                runningAnimations++;
                if (listener != null) {
                    listener.onAnimationStart(animation);
                }
            }

            @Override
            public void onAnimationEnd(final Animator animation) {
                super.onAnimationEnd(animation);
                if (listener != null) {
                    listener.onAnimationEnd(animation);
                }
                endAnimation();
            }

            @Override
            public void onAnimationCancel(final Animator animation) {
                super.onAnimationCancel(animation);
                if (listener != null) {
                    listener.onAnimationCancel(animation);
                }
                endAnimation();
            }

        };
    }
    // TODO: Calling this method should also work when the view is not yet inflated
    // TODO: Should this be executed as a pending action?
    /**
     * Shows the tab switcher: the positions of all tabs are calculated and clipped, the visible
     * tab views (plus the selected tab's view) are inflated, and each one is animated into its
     * switcher position.
     */
    @SuppressWarnings("WrongConstant")
    public final void showSwitcher() {
        if (!isSwitcherShown() && !isAnimationRunning()) {
            switcherShown = true;
            notifyOnSwitcherShown();
            attachedPosition = calculateAttachedPosition();
            Iterator iterator = new Iterator();
            TabView tabView;
            while ((tabView = iterator.next()) != null) {
                calculateAndClipTopThresholdPosition(tabView, iterator.previous());
                if (tabView.index == selectedTabIndex || tabView.isVisible()) {
                    viewRecycler.inflate(tabView);
                    View view = tabView.view;
                    // If the view has not been laid out yet, wait for layout before animating.
                    if (!ViewCompat.isLaidOut(view)) {
                        view.getViewTreeObserver().addOnGlobalLayoutListener(
                                createShowSwitcherLayoutListener(tabView));
                    } else {
                        animateShowSwitcher(tabView);
                    }
                }
            }
        }
    }
    /**
     * Animates the bottom margin of a view from its current value to the given target margin.
     */
    private void animateMargin(@NonNull final View view, final int targetMargin,
                               final long animationDuration) {
        LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
        int initialMargin = layoutParams.bottomMargin;
        // Animate the delta; the update listener adds it to the initial margin.
        ValueAnimator animation = ValueAnimator.ofInt(targetMargin - initialMargin);
        animation.setDuration(animationDuration);
        // The wrapper keeps the running-animation counter up to date.
        animation.addListener(createAnimationListenerWrapper(null));
        animation.setInterpolator(new AccelerateDecelerateInterpolator());
        animation.setStartDelay(0);
        animation.addUpdateListener(createMarginAnimatorUpdateListener(view, initialMargin));
        animation.start();
    }
private AnimatorUpdateListener createMarginAnimatorUpdateListener(@NonNull final View view,
final int initialMargin) {
return new AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
LayoutParams layoutParams = (LayoutParams) view.getLayoutParams();
layoutParams.bottomMargin = initialMargin + (int) animation.getAnimatedValue();
view.setLayoutParams(layoutParams);
}
};
}
    /**
     * Creates an update listener, which moves the first tab by the animated value while
     * overshooting at the top, and hides any other inflated tab view that would be covered by
     * (or peek out above) the first tab.
     *
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimatorUpdateListener}
     */
    private AnimatorUpdateListener createOvershootUpAnimatorUpdateListener() {
        return new AnimatorUpdateListener() {

            // The first tab's position at the start of the animation; captured lazily on the
            // first frame.
            private Float startPosition;

            @Override
            public void onAnimationUpdate(final ValueAnimator animation) {
                Iterator iterator = new Iterator();
                TabView tabView;
                while ((tabView = iterator.next()) != null) {
                    if (tabView.index == 0) {
                        View view = tabView.view;
                        if (startPosition == null) {
                            startPosition = getPosition(Axis.DRAGGING_AXIS, view);
                        }
                        setPosition(Axis.DRAGGING_AXIS, view,
                                startPosition + (float) animation.getAnimatedValue());
                    } else if (tabView.isInflated()) {
                        View firstView = iterator.first().view;
                        View view = tabView.view;
                        // Hide tabs that the first tab has moved above.
                        view.setVisibility(getPosition(Axis.DRAGGING_AXIS, firstView) <=
                                getPosition(Axis.DRAGGING_AXIS, view) ? View.INVISIBLE :
                                View.VISIBLE);
                    }
                }
            }

        };
    }
    /**
     * Debug-only helper: prints the actual position of every tab view to standard output.
     * Should not be called from production code paths.
     */
    private void printActualPositions() {
        Iterator iterator = new Iterator(true);
        TabView tabView;
        while ((tabView = iterator.next()) != null) {
            System.out.println(tabView.index + ": " + tabView.tag.actualPosition);
        }
    }
    // TODO: Calling this method should also work when the view is not yet inflated
    // TODO: Should this be executed as a pending action?
    /**
     * Hides the tab switcher: all inflated tab views are animated back to the full-screen
     * layout, cached thumbnail bitmaps are cleared and the shared bottom margin is reset.
     */
    public final void hideSwitcher() {
        if (isSwitcherShown() && !isAnimationRunning()) {
            switcherShown = false;
            notifyOnSwitcherHidden();
            tabViewBottomMargin = -1;
            recyclerAdapter.clearCachedBitmaps();
            Iterator iterator = new Iterator();
            TabView tabView;
            while ((tabView = iterator.next()) != null) {
                if (tabView.isInflated()) {
                    animateHideSwitcher(tabView);
                }
            }
        }
    }
public final void toggleSwitcherVisibility() {
if (switcherShown) {
hideSwitcher();
} else {
showSwitcher();
}
}
// Creates a listener that re-applies the given tab's tag (position, pivots, rotation) once
// the "show switcher" animation has ended.
private AnimatorListener createShowSwitcherAnimationListener(@NonNull final TabView tabView) {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
applyTag(tabView);
}
};
}
// Creates a listener invoked when the "hide switcher" animation of a tab ends: the selected
// tab is (re-)inflated full screen, all other tab views are recycled and the cache cleared.
private AnimatorListener createHideSwitcherAnimationListener(@NonNull final TabView tabView) {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
if (tabView.index == selectedTabIndex) {
viewRecycler.inflate(tabView);
} else {
viewRecycler.remove(tabView);
viewRecycler.clearCache();
}
}
};
}
// Returns the dragging-axis position at which a tab counts as "attached" to the stack.
// Derived from the spacing range scaled by the non-linear drag factor, offset by the
// top-threshold position of the first tab.
private float calculateAttachedPosition() {
return ((maxTabSpacing - minTabSpacing) / (1 - NON_LINEAR_DRAG_FACTOR)) *
NON_LINEAR_DRAG_FACTOR + calculateFirstTabTopThresholdPosition();
}
// Creates the listener for the fling (drag) animation: when it ends, the drag is released,
// the animation reference is cleared and any pending action is executed.
private AnimationListener createDragAnimationListener() {
return new AnimationListener() {
@Override
public void onAnimationStart(final Animation animation) {
}
@Override
public void onAnimationEnd(final Animation animation) {
// null event: release without computing a fling velocity.
handleRelease(null);
dragAnimation = null;
executePendingAction();
}
@Override
public void onAnimationRepeat(final Animation animation) {
}
};
}
/**
 * Wraps the given listener so that, when an overshoot animation ends, the view's pivots are
 * reset to their defaults before the wrapped listener (if any) is notified.
 *
 * @param view     The view whose pivots should be reset, not null
 * @param listener An optional listener to delegate to, may be null
 */
private AnimatorListener createOvershootAnimationListenerWrapper(@NonNull final View view,
        @Nullable final AnimatorListener listener) {
    return new AnimatorListenerAdapter() {

        @Override
        public void onAnimationEnd(final Animator animation) {
            super.onAnimationEnd(animation);
            setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
            // Bug fix: the orthogonal pivot was previously reset using the dragging-axis
            // default pivot (apparent copy-paste error); use the orthogonal-axis default.
            setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));

            if (listener != null) {
                listener.onAnimationEnd(animation);
            }
        }

    };
}
// Creates the listener for the "overshoot down" recovery animation: on end, the drag is
// released and any pending action is executed.
private AnimatorListener createOvershootDownAnimationListener() {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
super.onAnimationEnd(animation);
handleRelease(null);
executePendingAction();
}
};
}
// Creates the listener for the "overshoot up" recovery animation: on end, the drag is
// released (no pending action is executed here, unlike the overshoot-down variant).
private AnimatorListener createOvershootUpAnimationListener() {
return new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
super.onAnimationEnd(animation);
handleRelease(null);
}
};
}
// Moves every tab to its top-threshold position (the stacked layout at the top) and
// re-applies the tags of all inflated tab views.
private void dragToTopThresholdPosition() {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateAndClipTopThresholdPosition(tabView, iterator.previous());
if (tabView.isInflated()) {
applyTag(tabView);
}
}
}
/**
 * Calculates the top-threshold position of the given tab and clips it against the range of
 * positions that are allowed for the tab, updating the tab's tag.
 */
private void calculateAndClipTopThresholdPosition(@NonNull final TabView tabView,
        @Nullable final TabView previous) {
    final float thresholdPosition = calculateTopThresholdPosition(tabView, previous);
    clipDraggedTabPosition(thresholdPosition, tabView, previous);
}
/**
 * Returns the position of the given tab when all tabs are dragged to the top threshold.
 * The first tab rests at the first-tab threshold position; the second tab keeps the minimum
 * spacing to its predecessor; all following tabs keep the maximum spacing.
 */
private float calculateTopThresholdPosition(@NonNull final TabView tabView,
        @Nullable final TabView previous) {
    if (previous == null) {
        return calculateFirstTabTopThresholdPosition();
    }

    float spacing = tabView.index == 1 ? minTabSpacing : maxTabSpacing;
    return previous.tag.actualPosition - spacing;
}
/**
 * Returns the top-threshold position of the very first tab: at most STACKED_TAB_COUNT tabs
 * contribute stacked spacing below it.
 */
private float calculateFirstTabTopThresholdPosition() {
    int stackedCount = Math.min(getCount() - 1, STACKED_TAB_COUNT);
    return stackedCount * stackedTabSpacing;
}
// Moves every tab to its bottom-threshold position (tabs spread apart by the maximum
// spacing) and re-applies the tags of all inflated tab views.
private void dragToBottomThresholdPosition() {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateAndClipBottomThresholdPosition(tabView, iterator.previous());
if (tabView.isInflated()) {
applyTag(tabView);
}
}
}
/**
 * Calculates the bottom-threshold position of the given tab and clips it against the range
 * of positions that are allowed for the tab, updating the tab's tag.
 */
private void calculateAndClipBottomThresholdPosition(@NonNull final TabView tabView,
        @Nullable final TabView previous) {
    final float thresholdPosition = calculateBottomThresholdPosition(tabView);
    clipDraggedTabPosition(thresholdPosition, tabView, previous);
}
/**
 * Returns the position of the given tab when all tabs are dragged to the bottom threshold,
 * i.e. every remaining tab below it contributes the maximum spacing.
 */
private float calculateBottomThresholdPosition(@NonNull final TabView tabView) {
    int tabsBelow = getCount() - (tabView.index + 1);
    return tabsBelow * maxTabSpacing;
}
/**
 * Resets the accumulated drag distance stored in the tag of every tab.
 */
private void updateTags() {
    Iterator iterator = new Iterator();
    TabView tabView;

    while ((tabView = iterator.next()) != null) {
        tabView.tag.distance = 0;
    }
}
// TODO: Move to TabView inner class
// Applies the state stored in the tab's tag to its view: resets alpha, pivots and rotation
// and moves the view to the tag's projected position on the dragging axis.
private void applyTag(@NonNull final TabView tabView) {
Tag tag = tabView.tag;
float position = tag.projectedPosition;
View view = tabView.view;
view.setAlpha(1f);
setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
setPosition(Axis.DRAGGING_AXIS, view, position);
setPosition(Axis.ORTHOGONAL_AXIS, view, 0);
setRotation(Axis.ORTHOGONAL_AXIS, view, 0);
}
// Deprecated helper: applies the visibility derived from the tab's state to its view.
@Deprecated
@SuppressWarnings("WrongConstant")
private void adaptVisibility(@NonNull final TabView tabView) {
View view = tabView.view;
view.setVisibility(getVisibility(tabView));
}
/**
 * Returns the visibility that corresponds to the state of the given tab: tabs that are in a
 * hidden top-most or bottom-most state and are not being closed are invisible.
 */
@Deprecated
private int getVisibility(@NonNull final TabView tabView) {
    State state = tabView.tag.state;
    boolean hidden = (state == State.TOP_MOST_HIDDEN || state == State.BOTTOM_MOST_HIDDEN) &&
            !tabView.tag.closing;
    return hidden ? View.INVISIBLE : View.VISIBLE;
}
// While dragging down, applies the damped (non-linear) position to a visible tab whose
// predecessor is also visible, capping the gap to the predecessor at maxTabSpacing. When
// the cap is hit, the tab becomes the new "last attached" tab.
private void calculateNonLinearPositionWhenDraggingDown(final float dragDistance,
@NonNull final TabView tabView,
@Nullable final TabView previous,
final float currentPosition) {
if (previous != null && previous.tag.state == State.VISIBLE &&
tabView.tag.state == State.VISIBLE) {
float newPosition = calculateNonLinearPosition(dragDistance, currentPosition, tabView);
if (previous.tag.projectedPosition - newPosition >= maxTabSpacing) {
lastAttachedIndex = tabView.index;
newPosition = previous.tag.projectedPosition - maxTabSpacing;
}
clipDraggedTabPosition(newPosition, tabView, previous);
}
}
// Updates the position of a single tab for the current drag. The incremental distance since
// the last update is applied linearly first, then corrected by the direction-specific
// non-linear rules. The last tab (index == getCount() - 1) never moves.
private void calculateTabPosition(final float dragDistance, @NonNull final TabView tabView,
@Nullable final TabView previous) {
if (getCount() - tabView.index > 1) {
// Distance accumulated since this tab was last updated.
float distance = dragDistance - tabView.tag.distance;
tabView.tag.distance = dragDistance;
if (distance != 0) {
float currentPosition = tabView.tag.actualPosition;
float newPosition = currentPosition + distance;
clipDraggedTabPosition(newPosition, tabView, previous);
if (scrollDirection == ScrollDirection.DRAGGING_DOWN) {
calculateNonLinearPositionWhenDraggingDown(distance, tabView, previous,
currentPosition);
} else if (scrollDirection == ScrollDirection.DRAGGING_UP) {
calculateNonLinearPositionWhenDraggingUp(distance, tabView, previous,
currentPosition);
}
}
}
}
// While dragging up, applies the damped (non-linear) position to a visible tab that is not
// yet past the attached position. Tabs above the attached position (or the first tab) become
// the "last attached" reference; the gap to the predecessor is kept at least minTabSpacing
// unless the predecessor is stacked or hidden at the bottom.
private void calculateNonLinearPositionWhenDraggingUp(final float dragDistance,
@NonNull final TabView tabView,
@Nullable final TabView previous,
final float currentPosition) {
if (tabView.tag.state == State.VISIBLE) {
boolean attached = tabView.tag.projectedPosition > attachedPosition;
if (previous == null || attached) {
lastAttachedIndex = tabView.index;
}
if (previous != null && !attached) {
float newPosition =
calculateNonLinearPosition(dragDistance, currentPosition, tabView);
if (previous.tag.state != State.STACKED_BOTTOM &&
previous.tag.state != State.BOTTOM_MOST_HIDDEN &&
previous.tag.projectedPosition - newPosition <= minTabSpacing) {
newPosition = previous.tag.projectedPosition - minTabSpacing;
}
clipDraggedTabPosition(newPosition, tabView, previous);
}
}
}
/**
 * Applies the non-linear drag damping: the drag distance is attenuated exponentially with
 * the tab's index distance to the most recently attached tab.
 */
private float calculateNonLinearPosition(final float dragDistance, final float currentPosition,
        @NonNull final TabView tabView) {
    int exponent = tabView.index - lastAttachedIndex;
    float damped = (float) (dragDistance * Math.pow(NON_LINEAR_DRAG_FACTOR, exponent));
    return currentPosition + damped;
}
/**
 * Clips the given drag position of a tab against the top-most and bottom-most positions the
 * tab may occupy and updates the tab's tag accordingly. Inside the allowed range the tab is
 * VISIBLE and its projected position equals the drag position; outside it, the projected
 * position is pinned to the boundary while the actual (unclipped) position is retained.
 */
private void clipDraggedTabPosition(final float dragPosition, @NonNull final TabView tabView,
        @Nullable final TabView previous) {
    Pair<Float, State> topMost = calculateTopMostPositionAndState(tabView, previous);

    if (dragPosition <= topMost.first) {
        tabView.tag.projectedPosition = topMost.first;
        tabView.tag.actualPosition = dragPosition;
        tabView.tag.state = topMost.second;
        return;
    }

    Pair<Float, State> bottomMost = calculateBottomMostPositionAndState(tabView);

    if (dragPosition >= bottomMost.first) {
        tabView.tag.projectedPosition = bottomMost.first;
        tabView.tag.actualPosition = dragPosition;
        tabView.tag.state = bottomMost.second;
        return;
    }

    tabView.tag.projectedPosition = dragPosition;
    tabView.tag.actualPosition = dragPosition;
    tabView.tag.state = State.VISIBLE;
}
// Returns the highest position the given tab may occupy together with the state it takes
// there. Tabs near the end of the list are stacked individually; all earlier tabs share the
// fixed stack height and are hidden behind it.
private Pair<Float, State> calculateTopMostPositionAndState(@NonNull final TabView tabView,
@Nullable final TabView previous) {
if ((getCount() - tabView.index) <= STACKED_TAB_COUNT) {
float position = stackedTabSpacing * (getCount() - (tabView.index + 1));
return Pair.create(position,
(previous == null || previous.tag.state == State.VISIBLE) ? State.TOP_MOST :
State.STACKED_TOP);
} else {
// Beyond the visible stack: same position as the last stacked tab, but hidden.
float position = stackedTabSpacing * STACKED_TAB_COUNT;
return Pair.create(position,
(previous == null || previous.tag.state == State.VISIBLE) ? State.TOP_MOST :
State.TOP_MOST_HIDDEN);
}
}
// Returns the lowest position the given tab may occupy together with the state it takes
// there, accounting for the container size, toolbar height, insets and padding. The first
// STACKED_TAB_COUNT tabs stack individually at the bottom; later tabs share the stack
// position and are hidden.
private Pair<Float, State> calculateBottomMostPositionAndState(@NonNull final TabView tabView) {
float size = getSize(Axis.DRAGGING_AXIS, tabContainer);
int toolbarHeight =
isToolbarShown() && !isDraggingHorizontally() ? toolbar.getHeight() - tabInset : 0;
int padding = getPadding(Axis.DRAGGING_AXIS, Gravity.START) +
getPadding(Axis.DRAGGING_AXIS, Gravity.END);
// When dragging horizontally, the stack height is compensated by an offset.
int offset = isDraggingHorizontally() ? STACKED_TAB_COUNT * stackedTabSpacing : 0;
if (tabView.index < STACKED_TAB_COUNT) {
float position =
size - toolbarHeight - tabInset - (stackedTabSpacing * (tabView.index + 1)) -
padding + offset;
return Pair.create(position, State.STACKED_BOTTOM);
} else {
float position =
size - toolbarHeight - tabInset - (stackedTabSpacing * STACKED_TAB_COUNT) -
padding + offset;
return Pair.create(position, State.BOTTOM_MOST_HIDDEN);
}
}
// Touch dispatch for the switcher. Only consumes events while the switcher is shown and at
// least one tab exists; a running fling animation is cancelled by any new touch. Move/up
// events are only honoured for the pointer that started the gesture and while no other
// animation runs — otherwise the gesture is restarted.
@Override
public final boolean onTouchEvent(final MotionEvent event) {
if (isSwitcherShown() && !isEmpty()) {
if (dragAnimation != null) {
dragAnimation.cancel();
dragAnimation = null;
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
handleDown(event);
return true;
case MotionEvent.ACTION_MOVE:
if (!isAnimationRunning() && event.getPointerId(0) == pointerId) {
if (velocityTracker == null) {
velocityTracker = VelocityTracker.obtain();
}
velocityTracker.addMovement(event);
handleDrag(getPosition(Axis.DRAGGING_AXIS, event),
getPosition(Axis.ORTHOGONAL_AXIS, event));
} else {
// Pointer changed or an animation started: restart the gesture.
handleRelease(null);
handleDown(event);
}
return true;
case MotionEvent.ACTION_UP:
if (!isAnimationRunning() && event.getPointerId(0) == pointerId) {
handleRelease(event);
}
return true;
default:
break;
}
}
return super.onTouchEvent(event);
}
/**
 * Returns whether any tab animation is currently in progress: either the running-animation
 * counter is non-zero or a relocate animation exists.
 */
private boolean isAnimationRunning() {
    if (runningAnimations != 0) {
        return true;
    }

    return relocateAnimation != null;
}
/**
 * Handles an ACTION_DOWN event: remembers the pointer id of the gesture and (re)initializes
 * the velocity tracker with the event.
 */
private void handleDown(@NonNull final MotionEvent event) {
    pointerId = event.getPointerId(0);

    if (velocityTracker != null) {
        velocityTracker.clear();
    } else {
        velocityTracker = VelocityTracker.obtain();
    }

    velocityTracker.addMovement(event);
}
/**
 * Returns whether the tabs cannot be dragged up any further, i.e. the first tab has reached
 * the top-most state. Trivially true when at most one tab exists.
 */
private boolean isTopDragThresholdReached() {
    if (getCount() > 1) {
        TabView firstTab = new TabView(0);
        return firstTab.tag.state == State.TOP_MOST;
    }

    return true;
}
// Returns whether the tabs cannot be dragged down any further. Trivially true when at most
// one tab exists; otherwise the second-to-last tab is checked (index getCount() - 2 —
// presumably because the last tab never moves; TODO confirm) against the maximum spacing.
private boolean isBottomDragThresholdReached() {
if (getCount() <= 1) {
return true;
} else {
TabView tabView = new TabView(getCount() - 2);
return tabView.tag.projectedPosition >= maxTabSpacing;
}
}
// Tilts the inflated tabs by the given angle for the "overshoot down" effect. The last tab
// gets the maximum camera distance, the first visible tab the minimum; tabs in between are
// interpolated by their distance to the first visible tab, so deeper tabs appear flatter.
private void tiltOnOvershootDown(final float angle) {
float maxCameraDistance = getMaxCameraDistance();
float minCameraDistance = maxCameraDistance / 2f;
int firstVisibleIndex = -1;
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.isInflated()) {
View view = tabView.view;
if (!iterator.hasNext()) {
view.setCameraDistance(maxCameraDistance);
} else if (firstVisibleIndex == -1) {
view.setCameraDistance(minCameraDistance);
if (tabView.tag.state == State.VISIBLE) {
firstVisibleIndex = tabView.index;
}
} else {
int diff = tabView.index - firstVisibleIndex;
float ratio = (float) diff / (float) (getCount() - firstVisibleIndex);
view.setCameraDistance(
minCameraDistance + (maxCameraDistance - minCameraDistance) * ratio);
}
setPivot(Axis.DRAGGING_AXIS, view,
getPivotOnOvershootDown(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getPivotOnOvershootDown(Axis.ORTHOGONAL_AXIS, view));
setRotation(Axis.ORTHOGONAL_AXIS, view, angle);
}
}
}
// Tilts the first tab by the given angle for the "overshoot up" effect and hides every
// other inflated tab, since only the first tab is visible during this gesture.
private void tiltOnOvershootUp(final float angle) {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
View view = tabView.view;
if (tabView.index == 0) {
view.setCameraDistance(getMaxCameraDistance());
setPivot(Axis.DRAGGING_AXIS, view, getPivotOnOvershootUp(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getPivotOnOvershootUp(Axis.ORTHOGONAL_AXIS, view));
setRotation(Axis.ORTHOGONAL_AXIS, view, angle);
} else if (tabView.isInflated()) {
tabView.view.setVisibility(View.INVISIBLE);
}
}
}
/**
 * Returns the maximum camera distance used for tilt animations, scaled by the display
 * density.
 */
private float getMaxCameraDistance() {
    return 1280 * getResources().getDisplayMetrics().density;
}
// Core drag handler, called for every move event. Three regimes:
//  1. dragPosition above topDragThreshold: "overshoot up" — the first tab is pulled towards
//     the top proportionally to the overshoot distance, then tilted once the maximum
//     overshoot is exceeded.
//  2. dragPosition below bottomDragThreshold: "overshoot down" — all tabs are tilted.
//  3. otherwise: regular dragging — detects a close gesture (orthogonal drag on a closeable
//     tab), determines the scroll direction, repositions all tabs, and recycles/inflates
//     views as they leave/enter visibility.
// Returns true when the drag was handled in the regular regime.
@SuppressWarnings("WrongConstant")
private boolean handleDrag(final float dragPosition, final float orthogonalPosition) {
if (dragPosition <= topDragThreshold) {
if (!dragHelper.isReset()) {
dragHelper.reset(0);
updateTags();
}
scrollDirection = ScrollDirection.OVERSHOOT_UP;
overshootDragHelper.update(dragPosition);
float overshootDistance = Math.abs(overshootDragHelper.getDragDistance());
if (overshootDistance <= maxOvershootDistance) {
// Phase 1 of overshoot up: translate the first tab towards position 0.
float ratio = Math.max(0, Math.min(1, overshootDistance / maxOvershootDistance));
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.index == 0) {
View view = tabView.view;
float currentPosition = tabView.tag.projectedPosition;
setPivot(Axis.DRAGGING_AXIS, view,
getDefaultPivot(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view,
getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
setPosition(Axis.DRAGGING_AXIS, view,
currentPosition - (currentPosition * ratio));
} else if (tabView.isInflated()) {
View firstView = iterator.first().view;
View view = tabView.view;
view.setVisibility(getPosition(Axis.DRAGGING_AXIS, firstView) <=
getPosition(Axis.DRAGGING_AXIS, view) ? View.INVISIBLE :
View.VISIBLE);
}
}
} else {
// Phase 2 of overshoot up: tilt the first tab up to the maximum angle.
float ratio = Math.max(0, Math.min(1,
(overshootDistance - maxOvershootDistance) / maxOvershootDistance));
tiltOnOvershootUp(ratio * MAX_UP_OVERSHOOT_ANGLE);
}
} else if (dragPosition >= bottomDragThreshold) {
if (!dragHelper.isReset()) {
dragHelper.reset(0);
updateTags();
}
scrollDirection = ScrollDirection.OVERSHOOT_DOWN;
overshootDragHelper.update(dragPosition);
float overshootDistance = overshootDragHelper.getDragDistance();
float ratio = Math.max(0, Math.min(1, overshootDistance / maxOvershootDistance));
tiltOnOvershootDown(ratio * -MAX_DOWN_OVERSHOOT_ANGLE);
} else {
overshootDragHelper.reset();
float previousDistance = dragHelper.isReset() ? 0 : dragHelper.getDragDistance();
dragHelper.update(dragPosition);
closeDragHelper.update(orthogonalPosition);
// Start a close gesture once the orthogonal threshold is crossed on a closeable tab.
if (scrollDirection == ScrollDirection.NONE && draggedTabView == null &&
closeDragHelper.hasThresholdBeenReached()) {
TabView tabView = getFocusedTabView(dragHelper.getDragStartPosition());
if (tabView != null && tabView.tab.isCloseable()) {
draggedTabView = tabView;
}
}
// Otherwise, determine the scroll direction from the drag delta.
if (draggedTabView == null && dragHelper.hasThresholdBeenReached()) {
if (scrollDirection == ScrollDirection.OVERSHOOT_UP) {
scrollDirection = ScrollDirection.DRAGGING_DOWN;
} else if (scrollDirection == ScrollDirection.OVERSHOOT_DOWN) {
scrollDirection = ScrollDirection.DRAGGING_UP;
} else {
scrollDirection = previousDistance - dragHelper.getDragDistance() <= 0 ?
ScrollDirection.DRAGGING_DOWN : ScrollDirection.DRAGGING_UP;
}
}
if (draggedTabView != null) {
handleDragToClose();
} else if (scrollDirection != ScrollDirection.NONE) {
lastAttachedIndex = 0;
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
calculateTabPosition(dragHelper.getDragDistance(), tabView,
iterator.previous());
// Recycle views that left the visible range, inflate ones that entered it.
if (tabView.isInflated() && !tabView.isVisible()) {
viewRecycler.remove(tabView);
} else if (tabView.isVisible()) {
if (!tabView.isInflated()) {
inflateTabView(tabView);
} else {
applyTag(tabView);
}
}
}
checkIfDragThresholdReached(dragPosition);
}
return true;
}
return false;
}
// Inflates the view of the given tab. If the view was newly inflated, sizing and tag
// application are deferred to a global-layout listener (the view has no dimensions yet);
// if it was served from the recycler cache, they are applied immediately.
private void inflateTabView(@NonNull final TabView tabView) {
boolean inflated = viewRecycler.inflate(tabView, tabViewBottomMargin);
if (inflated) {
View view = tabView.view;
view.getViewTreeObserver()
.addOnGlobalLayoutListener(createInflateTabViewLayoutListener(tabView));
} else {
adaptTabViewSize(tabView);
applyTag(tabView);
}
}
// Checks whether the current drag has pushed the tabs to the top or bottom threshold. If
// so, records the current drag position as the new threshold, switches to the respective
// overshoot direction and snaps all tabs to their threshold positions.
private boolean checkIfDragThresholdReached(final float dragPosition) {
if (isBottomDragThresholdReached() && (scrollDirection == ScrollDirection.DRAGGING_DOWN ||
scrollDirection == ScrollDirection.OVERSHOOT_DOWN)) {
bottomDragThreshold = dragPosition;
scrollDirection = ScrollDirection.OVERSHOOT_DOWN;
dragToBottomThresholdPosition();
return true;
} else if (isTopDragThresholdReached() && (scrollDirection == ScrollDirection.DRAGGING_UP ||
scrollDirection == ScrollDirection.OVERSHOOT_UP)) {
topDragThreshold = dragPosition;
scrollDirection = ScrollDirection.OVERSHOOT_UP;
dragToTopThresholdPosition();
return true;
}
return false;
}
// Updates the dragged tab while it is being swiped orthogonally towards closing: the view is
// translated by the drag distance and simultaneously scaled down and faded out in proportion
// to how far it has travelled towards the fully-closed position.
private void handleDragToClose() {
View view = draggedTabView.view;
if (!draggedTabView.tag.closing) {
// First frame of the close gesture: reveal the tab behind the top-most one.
adaptTopMostTabViewWhenClosing(draggedTabView, draggedTabView.index + 1);
}
draggedTabView.tag.closing = true;
float dragDistance = closeDragHelper.getDragDistance();
setPivot(Axis.DRAGGING_AXIS, view, getPivotWhenClosing(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view, getPivotWhenClosing(Axis.ORTHOGONAL_AXIS, view));
float scale = getScale(view, true);
setPosition(Axis.ORTHOGONAL_AXIS, view, dragDistance);
// ratio is 1 at rest and approaches 0 as the tab nears the closed position.
float ratio = 1 - (Math.abs(dragDistance) / calculateClosedTabPosition());
float scaledClosedTabScale = closedTabScale * scale;
float targetScale = scaledClosedTabScale + ratio * (scale - scaledClosedTabScale);
setScale(Axis.DRAGGING_AXIS, view, targetScale);
setScale(Axis.ORTHOGONAL_AXIS, view, targetScale);
view.setAlpha(closedTabAlpha + ratio * (1 - closedTabAlpha));
}
/**
 * When the tab being closed is the top-most tab, the tab at the given index is promoted from
 * the hidden top-most state to the top-most state and inflated, so that it becomes visible
 * behind the closing tab.
 */
private void adaptTopMostTabViewWhenClosing(@NonNull final TabView closedTabView,
        final int index) {
    if (closedTabView.tag.state != State.TOP_MOST) {
        return;
    }

    TabView successor = new TabView(index);

    if (successor.tag.state == State.TOP_MOST_HIDDEN) {
        successor.tag.state = State.TOP_MOST;
        inflateTabView(successor);
    }
}
/**
 * Reverts the effect of adaptTopMostTabViewWhenClosing when a close gesture is aborted: the
 * tab at the given index is demoted back to the hidden top-most state and its view recycled.
 */
private void adaptTopMostTabViewWhenClosingAborted(@NonNull final TabView closedTabView,
        final int index) {
    if (closedTabView.tag.state != State.TOP_MOST) {
        return;
    }

    TabView successor = new TabView(index);

    if (successor.tag.state == State.TOP_MOST) {
        successor.tag.state = State.TOP_MOST_HIDDEN;
        viewRecycler.remove(successor);
    }
}
/**
 * Returns the orthogonal-axis distance a tab has to travel to count as fully closed, which
 * is the size of the tab container along that axis.
 */
private float calculateClosedTabPosition() {
    float containerSize = getSize(Axis.ORTHOGONAL_AXIS, tabContainer);
    return containerSize;
}
// Returns the tab whose view lies under the given dragging-axis position, or null. Only
// VISIBLE and TOP_MOST tabs are considered; iteration is front-to-back, so the first tab
// whose top edge is at or above the position wins.
@Nullable
private TabView getFocusedTabView(final float position) {
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
if (tabView.tag.state == State.VISIBLE || tabView.tag.state == State.TOP_MOST) {
View view = tabView.view;
float toolbarHeight = isToolbarShown() && !isDraggingHorizontally() ?
toolbar.getHeight() - tabInset : 0;
float viewPosition = getPosition(Axis.DRAGGING_AXIS, view) + toolbarHeight +
getPadding(Axis.DRAGGING_AXIS, Gravity.START);
if (viewPosition <= position) {
return tabView;
}
}
}
return null;
}
// Finishes the current gesture. Resets all drag helpers and thresholds, then dispatches on
// what the gesture was: a close swipe (animate the dragged tab closed or back), a regular
// drag (optionally fling), an overshoot recovery, or a plain tap (select a tab). The
// velocity tracker is always recycled at the end. Passing a null event skips fling/velocity
// handling.
private void handleRelease(@Nullable final MotionEvent event) {
boolean thresholdReached = dragHelper.hasThresholdBeenReached();
ScrollDirection flingDirection = this.scrollDirection;
this.dragHelper.reset(dragThreshold);
this.overshootDragHelper.reset();
this.closeDragHelper.reset();
this.topDragThreshold = -Float.MAX_VALUE;
this.bottomDragThreshold = Float.MAX_VALUE;
this.scrollDirection = ScrollDirection.NONE;
if (draggedTabView != null) {
float flingVelocity = 0;
if (event != null && velocityTracker != null) {
int pointerId = event.getPointerId(0);
velocityTracker.computeCurrentVelocity(1000, maxFlingVelocity);
// NOTE(review): the X velocity is used for the close swipe — presumably the
// orthogonal axis maps to X in the vertical layout; confirm for horizontal drag.
flingVelocity = Math.abs(velocityTracker.getXVelocity(pointerId));
}
View view = draggedTabView.view;
// Close when flung fast enough or dragged past a quarter of the tab's width.
boolean close = flingVelocity >= minCloseFlingVelocity ||
Math.abs(getPosition(Axis.ORTHOGONAL_AXIS, view)) >
getSize(Axis.ORTHOGONAL_AXIS, view) / 4f;
animateClose(draggedTabView, close, flingVelocity, 0,
createCloseAnimationListener(draggedTabView, close));
} else if (flingDirection == ScrollDirection.DRAGGING_UP ||
flingDirection == ScrollDirection.DRAGGING_DOWN) {
updateTags();
if (event != null && velocityTracker != null && thresholdReached) {
animateFling(event, flingDirection);
}
} else if (flingDirection == ScrollDirection.OVERSHOOT_DOWN) {
updateTags();
animateOvershootDown();
} else if (flingDirection == ScrollDirection.OVERSHOOT_UP) {
animateOvershootUp();
} else if (event != null && !dragHelper.hasThresholdBeenReached() &&
!closeDragHelper.hasThresholdBeenReached()) {
// No drag happened at all: treat the gesture as a click.
handleClick(event);
} else {
updateTags();
}
if (velocityTracker != null) {
velocityTracker.recycle();
velocityTracker = null;
}
}
/**
 * Handles a tap on the switcher: selects the tab whose view lies under the touched
 * dragging-axis position, if any.
 */
private void handleClick(@NonNull final MotionEvent event) {
    float touchPosition = getPosition(Axis.DRAGGING_AXIS, event);
    TabView focused = getFocusedTabView(touchPosition);

    if (focused == null) {
        return;
    }

    selectTab(focused.tab);
}
// Animates the tilted tabs back to a rotation of zero after an "overshoot down" gesture.
private void animateOvershootDown() {
animateTilt(new AccelerateDecelerateInterpolator(), createOvershootDownAnimationListener(),
MAX_DOWN_OVERSHOOT_ANGLE);
}
// Recovers from an "overshoot up" gesture. If tabs were tilted, the un-tilt runs first and
// the translation back into place is enqueued as a pending action; otherwise the
// translation is animated immediately.
private void animateOvershootUp() {
boolean tilted = animateTilt(new AccelerateInterpolator(), null, MAX_UP_OVERSHOOT_ANGLE);
if (tilted) {
enqueuePendingAction(new Runnable() {
@Override
public void run() {
animateOvershootUp(new DecelerateInterpolator());
}
});
} else {
animateOvershootUp(new AccelerateDecelerateInterpolator());
}
}
// Animates the first tab from its current dragging-axis position back to its projected
// position. The duration is scaled by the remaining distance relative to the stack height,
// so short distances animate faster.
private void animateOvershootUp(@NonNull final Interpolator interpolator) {
TabView tabView = new TabView(0);
View view = tabView.view;
setPivot(Axis.DRAGGING_AXIS, view, getDefaultPivot(Axis.DRAGGING_AXIS, view));
setPivot(Axis.ORTHOGONAL_AXIS, view, getDefaultPivot(Axis.ORTHOGONAL_AXIS, view));
float position = getPosition(Axis.DRAGGING_AXIS, view);
float targetPosition = tabView.tag.projectedPosition;
long animationDuration = getResources().getInteger(android.R.integer.config_shortAnimTime);
ValueAnimator animation = ValueAnimator.ofFloat(targetPosition - position);
animation.setDuration(Math.round(animationDuration * Math.abs(
(targetPosition - position) / (float) (STACKED_TAB_COUNT * stackedTabSpacing))));
animation.addListener(createAnimationListenerWrapper(createOvershootUpAnimationListener()));
animation.setInterpolator(interpolator);
animation.setStartDelay(0);
animation.addUpdateListener(createOvershootUpAnimatorUpdateListener());
animation.start();
}
// Animates the rotation of all currently tilted (inflated) tabs back to zero. Returns true
// if at least one tab had to be animated. The supplied listener is attached only to the
// animation of the last tilted tab found; durations are proportional to each tab's current
// tilt relative to maxAngle.
private boolean animateTilt(@NonNull final Interpolator interpolator,
@Nullable final AnimatorListener listener, final float maxAngle) {
long animationDuration = getResources().getInteger(android.R.integer.config_shortAnimTime);
Iterator iterator = new Iterator(true);
TabView tabView;
boolean result = false;
while ((tabView = iterator.next()) != null) {
if (tabView.isInflated()) {
View view = tabView.view;
if (getRotation(Axis.ORTHOGONAL_AXIS, view) != 0) {
result = true;
ViewPropertyAnimator animation = view.animate();
animation.setListener(createAnimationListenerWrapper(
createOvershootAnimationListenerWrapper(view,
iterator.hasNext() ? null :
listener))); // TODO: Iterator.hasNext() will not work
animation.setDuration(Math.round(animationDuration *
(Math.abs(getRotation(Axis.ORTHOGONAL_AXIS, view)) / maxAngle)));
animation.setInterpolator(interpolator);
animateRotation(Axis.ORTHOGONAL_AXIS, animation, 0);
animation.setStartDelay(0);
animation.start();
}
}
}
return result;
}
// Starts a fling animation after a fast drag release. The fling distance is proportional to
// the release velocity (negated for upward drags); the duration is chosen so that the
// average speed equals the release velocity. Slow releases (below minFlingVelocity) fling
// nothing.
private void animateFling(@NonNull final MotionEvent event,
@NonNull final ScrollDirection flingDirection) {
int pointerId = event.getPointerId(0);
velocityTracker.computeCurrentVelocity(1000, maxFlingVelocity);
float flingVelocity = Math.abs(velocityTracker.getYVelocity(pointerId));
if (flingVelocity > minFlingVelocity) {
float flingDistance = 0.25f * flingVelocity;
if (flingDirection == ScrollDirection.DRAGGING_UP) {
flingDistance = -1 * flingDistance;
}
dragAnimation = new FlingAnimation(flingDistance);
dragAnimation.setFillAfter(true);
dragAnimation.setAnimationListener(createDragAnimationListener());
dragAnimation.setDuration(Math.round(Math.abs(flingDistance) / flingVelocity * 1000));
dragAnimation.setInterpolator(new DecelerateInterpolator());
startAnimation(dragAnimation);
}
}
/**
 * Sets the decorator used to inflate the child views of tabs. Setting a new decorator
 * invalidates the cached child views and preview bitmaps.
 *
 * @param decorator The decorator, not null
 */
public final void setDecorator(@NonNull final Decorator decorator) {
    ensureNotNull(decorator, "The decorator may not be null");
    this.decorator = decorator;
    childViewRecycler.clearCache();
    recyclerAdapter.clearCachedBitmaps();
}
// Returns the decorator; throws an IllegalStateException when none has been set yet.
public final Decorator getDecorator() {
ensureNotNull(decorator, "No decorator has been set", IllegalStateException.class);
return decorator;
}
/**
 * Registers a listener to be notified about switcher events.
 *
 * @param listener The listener, not null
 */
public final void addListener(@NonNull final Listener listener) {
    ensureNotNull(listener, "The listener may not be null");
    listeners.add(listener);
}
/**
 * Unregisters a previously registered listener.
 *
 * @param listener The listener, not null
 */
public final void removeListener(@NonNull final Listener listener) {
    ensureNotNull(listener, "The listener may not be null");
    listeners.remove(listener);
}
// Returns the switcher's toolbar widget, never null.
@NonNull
public final Toolbar getToolbar() {
return toolbar;
}
/**
 * Shows or hides the toolbar.
 */
public final void showToolbar(final boolean show) {
    int visibility = show ? View.VISIBLE : View.INVISIBLE;
    toolbar.setVisibility(visibility);
}
/**
 * Returns whether the toolbar is currently visible.
 */
public final boolean isToolbarShown() {
    return View.VISIBLE == toolbar.getVisibility();
}
// Sets the toolbar's title; a null title clears it.
public final void setToolbarTitle(@Nullable final CharSequence title) {
toolbar.setTitle(title);
}
// Sets the toolbar's title from a string resource.
public final void setToolbarTitle(@StringRes final int resourceId) {
setToolbarTitle(getContext().getText(resourceId));
}
// Inflates a menu resource into the toolbar and installs the given click listener (a null
// listener removes any previous one).
public final void inflateToolbarMenu(@MenuRes final int resourceId,
@Nullable final OnMenuItemClickListener listener) {
toolbar.inflateMenu(resourceId);
toolbar.setOnMenuItemClickListener(listener);
}
// Returns the toolbar's menu.
public final Menu getToolbarMenu() {
return toolbar.getMenu();
}
// Wires every TabSwitcherButton action view found in the given menu to the given switcher:
// installs the click listener, initializes the displayed tab count and registers the button
// as a switcher listener so the count stays in sync.
public static void setupWithMenu(@NonNull final TabSwitcher tabSwitcher,
@NonNull final Menu menu,
@Nullable final OnClickListener listener) {
ensureNotNull(tabSwitcher, "The tab switcher may not be null");
ensureNotNull(menu, "The menu may not be null");
for (int i = 0; i < menu.size(); i++) {
MenuItem menuItem = menu.getItem(i);
View view = menuItem.getActionView();
if (view instanceof TabSwitcherButton) {
TabSwitcherButton tabSwitcherButton = (TabSwitcherButton) view;
tabSwitcherButton.setOnClickListener(listener);
tabSwitcherButton.setCount(tabSwitcher.getCount());
tabSwitcher.addListener(tabSwitcherButton);
}
}
}
// Sets the toolbar's navigation icon and its click listener; null clears either.
public final void setToolbarNavigationIcon(@Nullable final Drawable icon,
@Nullable final OnClickListener listener) {
toolbar.setNavigationIcon(icon);
toolbar.setNavigationOnClickListener(listener);
}
// Convenience overload: resolves the drawable resource and delegates.
public final void setToolbarNavigationIcon(@DrawableRes final int resourceId,
@Nullable final OnClickListener listener) {
setToolbarNavigationIcon(ContextCompat.getDrawable(getContext(), resourceId), listener);
}
// Overrides View.setPadding: instead of padding this view, the values are stored and applied
// as toolbar margins and as child/preview margins of every tab that currently has a view
// holder. The bottom value is deliberately not applied to the toolbar.
@Override
public final void setPadding(final int left, final int top, final int right, final int bottom) {
padding = new int[]{left, top, right, bottom};
LayoutParams toolbarLayoutParams = (LayoutParams) toolbar.getLayoutParams();
toolbarLayoutParams.setMargins(left, top, right, 0);
Iterator iterator = new Iterator();
TabView tabView;
while ((tabView = iterator.next()) != null) {
ViewHolder viewHolder = tabView.viewHolder;
if (viewHolder != null) {
adaptChildAndPreviewMargins(viewHolder);
}
}
}
/**
 * Applies the switcher's padding as margins to a tab's child view (if one is present) and
 * to its preview image view.
 */
private void adaptChildAndPreviewMargins(@NonNull final ViewHolder viewHolder) {
    int left = getPaddingLeft();
    int top = getPaddingTop();
    int right = getPaddingRight();
    int bottom = getPaddingBottom();

    if (viewHolder.child != null) {
        LayoutParams childParams = (LayoutParams) viewHolder.child.getLayoutParams();
        childParams.setMargins(left, top, right, bottom);
    }

    LayoutParams previewParams = (LayoutParams) viewHolder.previewImageView.getLayoutParams();
    previewParams.setMargins(left, top, right, bottom);
}
/**
 * Called when the title of the given tab has changed; updates the tab's view if it is
 * currently inflated.
 */
@Override
public final void onTitleChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);

    if (index == -1) {
        return;
    }

    TabView tabView = new TabView(index);

    if (tabView.isInflated()) {
        adaptTitle(tabView.viewHolder, tab);
    }
}
// Writes the tab's title into the view holder's title text view.
private void adaptTitle(@NonNull final ViewHolder viewHolder, @NonNull final Tab tab) {
viewHolder.titleTextView.setText(tab.getTitle());
}
/**
 * Called when the icon of the given tab has changed; updates the tab's view if it is
 * currently inflated.
 */
@Override
public final void onIconChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);

    if (index == -1) {
        return;
    }

    TabView tabView = new TabView(index);

    if (tabView.isInflated()) {
        adaptIcon(tabView.viewHolder, tab);
    }
}
// Shows the tab's icon as the leading compound drawable of the title text view.
private void adaptIcon(@NonNull final ViewHolder viewHolder, @NonNull final Tab tab) {
viewHolder.titleTextView
.setCompoundDrawablesWithIntrinsicBounds(tab.getIcon(getContext()), null, null,
null);
}
/**
 * Called when the closeable flag of the given tab has changed; updates the tab's view if it
 * is currently inflated.
 */
@Override
public final void onCloseableChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);

    if (index == -1) {
        return;
    }

    TabView tabView = new TabView(index);

    if (tabView.isInflated()) {
        adaptCloseButton(tabView.viewHolder, tab);
    }
}
// Shows or hides the close button depending on the tab's closeable flag and installs (or
// removes) the corresponding click listener.
private void adaptCloseButton(@NonNull final ViewHolder viewHolder, @NonNull final Tab tab) {
viewHolder.closeButton.setVisibility(tab.isCloseable() ? View.VISIBLE : View.GONE);
viewHolder.closeButton
.setOnClickListener(tab.isCloseable() ? createCloseButtonClickListener(tab) : null);
}
/**
 * Called when the color of the given tab has changed; updates the tab's view if it is
 * currently inflated.
 */
@Override
public final void onColorChanged(@NonNull final Tab tab) {
    int index = indexOf(tab);

    if (index == -1) {
        return;
    }

    TabView tabView = new TabView(index);

    if (tabView.isInflated()) {
        adaptColor(tabView.view, tabView.viewHolder, tab);
    }
}
// Tints the tab's background and border with the tab's color; a color of -1 acts as a
// sentinel for "unset" and falls back to the default tab background color.
private void adaptColor(@NonNull final View view, @NonNull final ViewHolder viewHolder,
@NonNull final Tab tab) {
int color = tab.getColor();
Drawable background = view.getBackground();
background
.setColorFilter(color != -1 ? color : tabBackgroundColor, PorterDuff.Mode.MULTIPLY);
Drawable border = viewHolder.borderView.getBackground();
border.setColorFilter(color != -1 ? color : tabBackgroundColor, PorterDuff.Mode.MULTIPLY);
}
// Returns the left padding stored by setPadding (index 0 of the padding array).
@Override
public final int getPaddingLeft() {
return padding[0];
}
// Returns the top padding stored by setPadding (index 1 of the padding array).
@Override
public final int getPaddingTop() {
return padding[1];
}
// Returns the right padding stored by setPadding (index 2 of the padding array).
@Override
public final int getPaddingRight() {
return padding[2];
}
// Returns the bottom padding stored by setPadding (index 3 of the padding array).
@Override
public final int getPaddingBottom() {
return padding[3];
}
/**
 * Returns the start padding, resolving the layout direction on API 17+; on older API levels
 * start always maps to left.
 */
@Override
public final int getPaddingStart() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
        return getPaddingLeft();
    }

    boolean rtl = getLayoutDirection() == LAYOUT_DIRECTION_RTL;
    return rtl ? getPaddingRight() : getPaddingLeft();
}
/**
 * Returns the end padding, resolving the layout direction on API 17+; on older API levels
 * end always maps to right.
 */
@Override
public final int getPaddingEnd() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
        return getPaddingRight();
    }

    boolean rtl = getLayoutDirection() == LAYOUT_DIRECTION_RTL;
    return rtl ? getPaddingLeft() : getPaddingRight();
}
// Runs once after the first layout pass: detaches itself from the view tree observer and
// inflates the view of the currently selected tab, if one is selected.
@Override
public final void onGlobalLayout() {
ViewUtil.removeOnGlobalLayoutListener(getViewTreeObserver(), this);
if (selectedTabIndex != -1) {
TabView tabView = new TabView(selectedTabIndex);
viewRecycler.inflate(tabView);
}
}
} | Refactoring.
| library/src/main/java/de/mrapp/android/tabswitcher/TabSwitcher.java | Refactoring. |
|
Java | apache-2.0 | 39e37d0e9a7e0d50b6608bcb1d2d9e924dd340f3 | 0 | bjagg/CalendarPortlet,manhattancollege/up-calendar-portlet,manhattancollege/up-calendar-portlet,bjagg/CalendarPortlet,Jasig/CalendarPortlet,Jasig/CalendarPortlet | /*
* Created on May 19, 2008
*
* Copyright(c) The University of Manchester, May 19, 2008. All rights reserved.
* (See licensing and redistribution disclosures at end of this file.)
*
*/
package edu.yale.its.tp.portlets.calendar.service;
import java.io.IOException;
import java.util.Map;
import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.xml.sax.SAXException;
import edu.yale.its.tp.cas.client.CASReceipt;
import edu.yale.its.tp.cas.portlet.ProxyTicketService;
/**
* CachedCredentialsInitializationService initializes authentication resources when a
* user creates a new session with the portlet.
*
* @author Anthony Colebourne
*/
public class CachedCredentialsInitializationService implements IInitializationService {

	private static Log log = LogFactory.getLog(CachedCredentialsInitializationService.class);

	/*
	 * (non-Javadoc)
	 * @see edu.yale.its.tp.portlets.calendar.service.IInitializationService#initialize(javax.portlet.PortletRequest)
	 */
	/**
	 * Caches the user's password from the portlet USER_INFO map into the
	 * application-scoped portlet session when a new session is created.
	 *
	 * @param request the portlet request for the newly created session
	 */
	public void initialize(javax.portlet.PortletRequest request) {
		// get the UserInfo map from the portlet session
		PortletSession session = request.getPortletSession();
		Map userinfo = (Map) request.getAttribute(PortletRequest.USER_INFO);

		// USER_INFO may legitimately be null (e.g. the user is not
		// authenticated, or the portal exposes no user attributes); the
		// original code would throw a NullPointerException here.
		if (userinfo == null) {
			log.warn("No USER_INFO map available; cached credentials will not be initialized");
			return;
		}

		// get the credentials for this portlet from the UserInfo map
		String password = (String) userinfo.get("password");
		session.setAttribute("password", password, PortletSession.APPLICATION_SCOPE);
	}
}
/*
* CachedCredentialsInitializationService.java
*
* Copyright (c) Feb 13, 2008 The University of Manchester. All rights reserved.
*
* THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE, ARE EXPRESSLY DISCLAIMED. IN NO EVENT SHALL
* MANCHESTER UNIVERSITY OR ITS EMPLOYEES BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED, THE COSTS OF PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED IN ADVANCE OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Redistribution and use of this software in source or binary forms, with or
* without modification, are permitted, provided that the following conditions
* are met.
*
* 1. Any redistribution must include the above copyright notice and disclaimer
* and this list of conditions in any related documentation and, if feasible, in
* the redistributed software.
*
* 2. Any redistribution must include the acknowledgment, "This product includes
* software developed by The University of Manchester," in any related documentation and, if
* feasible, in the redistributed software.
*
* 3. The names "The University of Manchester" and "Manchester University" must not be used to endorse or
* promote products derived from this software.
*/ | src/main/java/edu/yale/its/tp/portlets/calendar/service/CachedCredentialsInitializationService.java | /*
* Created on May 19, 2008
*
* Copyright(c) The University of Manchester, May 19, 2008. All rights reserved.
* (See licensing and redistribution disclosures at end of this file.)
*
*/
package edu.yale.its.tp.portlets.calendar.service;
import java.io.IOException;
import java.util.Map;
import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.xml.sax.SAXException;
import edu.yale.its.tp.cas.client.CASReceipt;
import edu.yale.its.tp.cas.portlet.ProxyTicketService;
/**
* CachedCredentialsInitializationService initializes authentication resources when a
* user creates a new session with the portlet.
*
* @author Anthony Colebourne
*/
public class CachedCredentialsInitializationService implements IInitializationService {

	private static Log log = LogFactory.getLog(CachedCredentialsInitializationService.class);

	/*
	 * (non-Javadoc)
	 * @see edu.yale.its.tp.portlets.calendar.service.IInitializationService#initialize(javax.portlet.PortletRequest)
	 */
	/**
	 * Caches the user's password (from the portlet USER_INFO map) and remote
	 * user name into the application-scoped portlet session when a new
	 * session is created.
	 *
	 * @param request the portlet request for the newly created session
	 */
	public void initialize(javax.portlet.PortletRequest request) {
		// get the UserInfo map from the portlet session
		PortletSession session = request.getPortletSession();
		Map userinfo = (Map) request.getAttribute(PortletRequest.USER_INFO);

		// USER_INFO may legitimately be null (e.g. the user is not
		// authenticated, or the portal exposes no user attributes); the
		// original code would throw a NullPointerException here.
		if (userinfo == null) {
			log.warn("No USER_INFO map available; cached credentials will not be initialized");
			return;
		}

		// get the credentials for this portlet from the UserInfo map
		String password = (String) userinfo.get("password");
		session.setAttribute("password", password, PortletSession.APPLICATION_SCOPE);
		session.setAttribute("username", request.getRemoteUser(), PortletSession.APPLICATION_SCOPE);
	}
}
/*
* CachedCredentialsInitializationService.java
*
* Copyright (c) Feb 13, 2008 The University of Manchester. All rights reserved.
*
* THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE, ARE EXPRESSLY DISCLAIMED. IN NO EVENT SHALL
* MANCHESTER UNIVERSITY OR ITS EMPLOYEES BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED, THE COSTS OF PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED IN ADVANCE OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Redistribution and use of this software in source or binary forms, with or
* without modification, are permitted, provided that the following conditions
* are met.
*
* 1. Any redistribution must include the above copyright notice and disclaimer
* and this list of conditions in any related documentation and, if feasible, in
* the redistributed software.
*
* 2. Any redistribution must include the acknowledgment, "This product includes
* software developed by The University of Manchester," in any related documentation and, if
* feasible, in the redistributed software.
*
* 3. The names "The University of Manchester" and "Manchester University" must not be used to endorse or
* promote products derived from this software.
*/ | NOJIRA Service should not store user token in session, it's already available under subscribeId
git-svn-id: def15615b07c92cc9e7b9ca2bf636977f2cd2758@16429 f5dbab47-78f9-eb45-b975-e544023573eb
| src/main/java/edu/yale/its/tp/portlets/calendar/service/CachedCredentialsInitializationService.java | NOJIRA Service should not store user token in session, it's already available under subscribeId |
|
Java | apache-2.0 | 3186c36d985866cb44b47362cfebb28ca5c18403 | 0 | noear/Weed3,noear/Weed3 | package org.noear.weed;
/**
 * Builder for SQL where clauses, parameterized on itself so the fluent
 * methods inherited from {@link WhereBase} return {@code WhereQ}.
 */
public class WhereQ extends WhereBase<WhereQ> {

    /**
     * Creates a stand-alone where-clause builder bound to the given context.
     *
     * @param context the database context to operate against
     */
    protected WhereQ(DbContext context) {
        super(context);
    }

    /**
     * Creates a where-clause builder that shares the context and SQL builder
     * of an existing table query.
     *
     * @param query the table query whose state is adopted
     */
    protected WhereQ(DbTableQuery query) {
        super();
        _context = query._context;
        _builder = query._builder;
    }
}
| weed3/src/main/java/org/noear/weed/WhereQ.java | package org.noear.weed;
public class WhereQ extends WhereBase<WhereQ> {
public WhereQ(DbContext context) {
super(context);
}
public WhereQ(DbTableQuery query){
super();
_context = query._context;
_builder = query._builder;
}
}
| 3.2.3.10
| weed3/src/main/java/org/noear/weed/WhereQ.java | 3.2.3.10 |
|
Java | apache-2.0 | eab6a2c672634266e09a30f90d716fb98844ca0c | 0 | dslomov/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,robovm/robovm-studio,caot/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,izonder/intellij-community,salguarnieri/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,signed/intellij-community,diorcety/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,fnouama/intellij-community,samthor/intellij-community,jagguli/intellij-community,holmes/intellij-community,semonte/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,fitermay/intellij-community,signed/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,apixandru/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,ibinti/intellij-community,FHannes/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,signed/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,FHannes/inte
llij-community,kool79/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,caot/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,holmes/intellij-community,hurricup/intellij-community,fitermay/intellij-community,samthor/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,clumsy/intellij-community,apixandru/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,caot/intellij-community,robovm/robovm-studio,izonder/intellij-community,tmpgit/intellij-community,supersven/intellij-community,tmpgit/intellij-community,caot/intellij-community,da1z/intellij-community,petteyg/intellij-community,robovm/robovm-studio,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,xfournet/intellij-community,da1z/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,m
glukhikh/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,semonte/intellij-community,petteyg/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,da1z/intellij-community,signed/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,fitermay/intellij-community,FHannes/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,allotria/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,fnouama/intellij-community,robovm/robovm-studio,blademainer/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,signed/intellij-community,wreckJ/intellij-community,samthor/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,retomerz/intellij-community,samthor/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,clumsy/intelli
j-community,ol-loginov/intellij-community,ryano144/intellij-community,ibinti/intellij-community,xfournet/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,signed/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,izonder/intellij-community,diorcety/intellij-community,samthor/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,slisson/intellij-community,FHannes/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,holmes/intellij-community,fnouama/intellij-community,da1z/intellij-community,slisson/intellij-community,dslomov/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,kool79/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,diorcety/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,wreckJ/intel
lij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,da1z/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,izonder/intellij-community,vladmm/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,adedayo/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,semonte/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,caot/intellij-community,dslomov/intellij-community,holmes/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,allotria/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,caot/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,da1z/intellij-community,hurricup/intellij-community,kool79/intellij-community,clumsy/intellij-community,vladmm/intellij-community,fnouama/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,samthor/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,allotria/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,clumsy/intellij-community,vladmm/intellij-community,xfournet/intellij-community,retomerz/intellij-community,signed/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-communi
ty,TangHao1987/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,asedunov/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,allotria/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,blademainer/intellij-community,adedayo/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,supersven/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,adedayo/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,kool79/intellij-community,xfournet/intellij-community,kdwink/intellij-community,amith01994/intellij-community,apixandru/intellij-community,jagguli/intellij-community,n
icolargo/intellij-community,ibinti/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,fitermay/intellij-community,ibinti/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,supersven/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,izonder/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,izonder/intellij-community,xfournet/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,ryano144/intellij-community,signed/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,samthor/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,FHannes/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,FHannes/i
ntellij-community,lucafavatella/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,kool79/intellij-community,ibinti/intellij-community,hurricup/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,youdonghai/intellij-community,samthor/intellij-community,kool79/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,signed/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,semonte/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,semonte/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,kool79/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,allotria/intellij-community,fitermay/intellij-community,hurricup/intellij-community,amith01994/intellij-community,TangHao1987/intellij-communi
ty,fengbaicanhe/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,ryano144/intellij-community,signed/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ryano144/intellij-community,caot/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,clumsy/intellij-community,diorcety/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,allotria/intellij-community,fnouama/intellij-community,adedayo/intellij-community,FHannes/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,kdwink/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,
jagguli/intellij-community,diorcety/intellij-community,FHannes/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,izonder/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,kool79/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,robovm/robovm-studio,da1z/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,adedayo/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,allotria/intellij-community,da1z/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,allotria/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,caot/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,petteyg/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,amith01994/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,izonder/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,diorcety/intellij-c
ommunity,salguarnieri/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,amith01994/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,xfournet/intellij-community,holmes/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,caot/intellij-community,allotria/intellij-community,retomerz/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,holmes/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,signed/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,kool79/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,hurricup/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,izonder/intellij-community,signed/intellij-community | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.codeStyle;
import com.intellij.formatting.*;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.lang.*;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.Indent;
import com.intellij.psi.impl.CheckUtil;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.util.CharTable;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class CodeStyleManagerImpl extends CodeStyleManager {
// Logger for this implementation class.
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.codeStyle.CodeStyleManagerImpl");
// Per-thread flag/state tracking whether sequential processing is allowed
// while work runs under a progress indicator; each thread lazily gets its
// own ProcessingUnderProgressInfo instance.
private static final ThreadLocal<ProcessingUnderProgressInfo> SEQUENTIAL_PROCESSING_ALLOWED
= new ThreadLocal<ProcessingUnderProgressInfo>()
{
@Override
protected ProcessingUnderProgressInfo initialValue() {
return new ProcessingUnderProgressInfo();
}
};
// Project this CodeStyleManager instance serves (set once in the constructor).
private final Project myProject;
// Placeholder identifier text used internally by formatting logic.
@NonNls private static final String DUMMY_IDENTIFIER = "xxx";
/**
 * Creates a code-style manager bound to the given project.
 *
 * @param project the project this manager serves
 */
public CodeStyleManagerImpl(Project project) {
    this.myProject = project;
}
/**
 * @return the project this code-style manager was created for
 */
@Override
@NotNull
public Project getProject() {
    return this.myProject;
}
/**
 * Reformats the given element fully, i.e. including post-format processing
 * (not restricted to white-space-only changes).
 *
 * @param element the element to reformat
 * @return the reformatted element
 * @throws IncorrectOperationException if the element cannot be modified
 */
@Override
@NotNull
public PsiElement reformat(@NotNull PsiElement element) throws IncorrectOperationException {
    // Delegate with canChangeWhiteSpacesOnly == false.
    return reformat(element, false);
}
/**
 * Reformats the given element, optionally skipping the post-format
 * processors so that only white space is changed.
 *
 * @param element                 the element to reformat
 * @param canChangeWhiteSpacesOnly when {@code true}, post-format processors
 *                                 are not applied
 * @return the reformatted element
 * @throws IncorrectOperationException if the element cannot be modified
 */
@Override
@NotNull
public PsiElement reformat(@NotNull PsiElement element, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException {
    CheckUtil.checkWritable(element);

    // Elements without a backing tree cannot be formatted; return unchanged.
    if (!SourceTreeToPsiMap.hasTreeElement(element)) {
        return element;
    }

    ASTNode node = SourceTreeToPsiMap.psiElementToTree(element);
    PsiElement formatted =
        SourceTreeToPsiMap.treeElementToPsi(new CodeFormatterFacade(getSettings()).processElement(node));

    return canChangeWhiteSpacesOnly ? formatted : postProcessElement(formatted);
}
/**
 * Runs every registered {@link PostFormatProcessor} over the element,
 * feeding each processor's result into the next.
 *
 * @param formatted the freshly formatted element
 * @return the element after all post-format processors have run
 */
private PsiElement postProcessElement(@NotNull final PsiElement formatted) {
    PsiElement current = formatted;
    for (PostFormatProcessor processor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) {
        current = processor.processElement(current, getSettings());
    }
    return current;
}
private void postProcessText(@NotNull final PsiFile file, @NotNull final TextRange textRange) {
TextRange currentRange = textRange;
for (final PostFormatProcessor myPostFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) {
currentRange = myPostFormatProcessor.processText(file, currentRange, getSettings());
}
}
@Override
public PsiElement reformatRange(@NotNull PsiElement element,
                                int startOffset,
                                int endOffset,
                                boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException {
  return reformatRangeImpl(element, startOffset, endOffset, canChangeWhiteSpacesOnly);
}

@Override
public PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset)
  throws IncorrectOperationException {
  // Same as the overload above, but post-format processors always run.
  return reformatRangeImpl(element, startOffset, endOffset, false);
}

// Forces materialization of all lazily-parseable children so the formatter sees the complete tree.
private static void transformAllChildren(final ASTNode file) {
  ((TreeElement)file).acceptTree(new RecursiveTreeElementWalkingVisitor() {
  });
}
@Override
public void reformatText(@NotNull PsiFile file, int startOffset, int endOffset) throws IncorrectOperationException {
  // Single-range convenience overload.
  reformatText(file, Collections.singleton(new TextRange(startOffset, endOffset)));
}

@Override
public void reformatText(@NotNull PsiFile file, @NotNull Collection<TextRange> ranges)
  throws IncorrectOperationException {
  // No editor supplied — one is looked up from the file when caret restoration is needed.
  reformatText(file, ranges, null);
}
/**
 * Reformats the given ranges of {@code file}, applies post-format processors to the surviving
 * ranges, and restores the caret position when it was on a blank line that the formatter trimmed.
 * <p/>
 * Must be called inside a write action; all documents are committed first.
 *
 * @param file   file to reformat (must be writable and backed by a tree)
 * @param ranges ranges to reformat; the method is a no-op when empty
 * @param editor editor showing the file, if known; when {@code null} an editor is looked up
 *               from the file so the caret position can still be preserved
 * @throws IncorrectOperationException if the file is read-only
 */
public void reformatText(@NotNull PsiFile file, @NotNull Collection<TextRange> ranges, @Nullable Editor editor) throws IncorrectOperationException {
  if (ranges.isEmpty()) {
    return;
  }
  ApplicationManager.getApplication().assertWriteAccessAllowed();
  PsiDocumentManager.getInstance(getProject()).commitAllDocuments();

  CheckUtil.checkWritable(file);
  if (!SourceTreeToPsiMap.hasTreeElement(file)) {
    return;
  }

  ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(file);
  transformAllChildren(treeElement);

  final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(getSettings());

  LOG.assertTrue(file.isValid());

  if (editor == null) {
    editor = PsiUtilBase.findEditor(file);
  }

  // There is a possible case that cursor is located at the end of the line that contains only white spaces. For example:
  //     public void foo() {
  //         <caret>
  //     }
  // Formatter removes such white spaces, i.e. keeps only line feed symbol. But we want to preserve caret position then.
  // So, we check if it should be preserved and restore it after formatting if necessary
  int visualColumnToRestore = -1;

  if (editor != null) {
    Document document = editor.getDocument();
    int caretOffset = editor.getCaretModel().getOffset();
    caretOffset = Math.max(Math.min(caretOffset, document.getTextLength() - 1), 0);
    CharSequence text = document.getCharsSequence();
    int caretLine = document.getLineNumber(caretOffset);
    int lineStartOffset = document.getLineStartOffset(caretLine);
    int lineEndOffset = document.getLineEndOffset(caretLine);
    boolean fixCaretPosition = true;
    for (int i = lineStartOffset; i < lineEndOffset; i++) {
      char c = text.charAt(i);
      if (c != ' ' && c != '\t' && c != '\n') {
        fixCaretPosition = false;
        break;
      }
    }
    if (fixCaretPosition) {
      visualColumnToRestore = editor.getCaretModel().getVisualPosition().column;
    }
  }

  // Remember each range's boundary elements via smart pointers so post-format processing can be
  // applied to the (possibly shifted) ranges after the main formatting pass.
  final SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(getProject());
  List<RangeFormatInfo> infos = new ArrayList<RangeFormatInfo>();
  for (TextRange range : ranges) {
    final PsiElement start = findElementInTreeWithFormatterEnabled(file, range.getStartOffset());
    final PsiElement end = findElementInTreeWithFormatterEnabled(file, range.getEndOffset());
    if (start != null && !start.isValid()) {
      LOG.error("start=" + start + "; file=" + file);
    }
    if (end != null && !end.isValid()) {
      // Fixed copy-paste bug: this used to log the 'start' element under the "end=" label
      // and the file under a second "end=" label, producing a useless diagnostic.
      LOG.error("end=" + end + "; file=" + file);
    }
    boolean formatFromStart = range.getStartOffset() == 0;
    boolean formatToEnd = range.getEndOffset() == file.getTextLength();
    infos.add(new RangeFormatInfo(
      start == null ? null : smartPointerManager.createSmartPsiElementPointer(start),
      end == null ? null : smartPointerManager.createSmartPsiElementPointer(end),
      formatFromStart,
      formatToEnd
    ));
  }

  FormatTextRanges formatRanges = new FormatTextRanges();
  for (TextRange range : ranges) {
    formatRanges.add(range, true);
  }
  codeFormatter.processText(file, formatRanges, true);

  for (RangeFormatInfo info : infos) {
    final PsiElement startElement = info.startPointer == null ? null : info.startPointer.getElement();
    final PsiElement endElement = info.endPointer == null ? null : info.endPointer.getElement();
    if ((startElement != null || info.fromStart) && (endElement != null || info.toEnd)) {
      postProcessText(file, new TextRange(info.fromStart ? 0 : startElement.getTextRange().getStartOffset(),
                                          info.toEnd ? file.getTextLength() : endElement.getTextRange().getEndOffset()));
    }
  }

  if (editor == null) {
    return;
  }

  if (visualColumnToRestore < 0) {
    editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
    return;
  }

  // Restore the caret's visual column on the now-trimmed blank line.
  CaretModel caretModel = editor.getCaretModel();
  VisualPosition position = caretModel.getVisualPosition();
  if (visualColumnToRestore != position.column) {
    caretModel.moveToVisualPosition(new VisualPosition(position.line, visualColumnToRestore));
  }
}
/**
 * Shared implementation of both {@code reformatRange} overloads: formats the given offset range
 * of {@code element} and optionally runs post-format processors on the result.
 */
private PsiElement reformatRangeImpl(final PsiElement element,
                                     final int startOffset,
                                     final int endOffset,
                                     boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException {
  LOG.assertTrue(element.isValid());
  CheckUtil.checkWritable(element);
  if (!SourceTreeToPsiMap.hasTreeElement(element)) {
    return element;
  }

  final ASTNode tree = SourceTreeToPsiMap.psiElementToTree(element);
  final CodeFormatterFacade formatter = new CodeFormatterFacade(getSettings());
  final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(formatter.processRange(tree, startOffset, endOffset));
  if (canChangeWhiteSpacesOnly) {
    return formatted;
  }
  return postProcessElement(formatted);
}
/**
 * Reformats the region around {@code addedElement} right after it was inserted under {@code parent}.
 * Multi-root files and injected fragments are handled by mapping the work to the base-language /
 * top-level file first.
 *
 * @param parent       tree parent the element was attached to
 * @param addedElement the newly added element; must already be a child of {@code parent}
 */
@Override
public void reformatNewlyAddedElement(@NotNull final ASTNode parent, @NotNull final ASTNode addedElement) throws IncorrectOperationException {
  LOG.assertTrue(addedElement.getTreeParent() == parent, "addedElement must be added to parent");

  final PsiElement psiElement = parent.getPsi();

  PsiFile containingFile = psiElement.getContainingFile();
  final FileViewProvider fileViewProvider = containingFile.getViewProvider();
  if (fileViewProvider instanceof MultiplePsiFilesPerDocumentFileViewProvider) {
    // Several PSI roots share one document — format against the base-language tree.
    containingFile = fileViewProvider.getPsi(fileViewProvider.getBaseLanguage());
  }

  TextRange textRange = addedElement.getTextRange();
  final Document document = fileViewProvider.getDocument();
  if (document instanceof DocumentWindow) {
    // Injected context: translate to the host (top-level) file and its coordinates.
    containingFile = InjectedLanguageManager.getInstance(containingFile.getProject()).getTopLevelFile(containingFile);
    textRange = ((DocumentWindow)document).injectedToHost(textRange);
  }

  final FormattingModelBuilder builder = LanguageFormatting.INSTANCE.forContext(containingFile);
  if (builder != null) {
    final FormattingModel model = CoreFormatterUtil.buildModel(builder, containingFile, getSettings(), FormattingMode.REFORMAT);
    FormatterEx.getInstanceEx().formatAroundRange(model, getSettings(), textRange, containingFile.getFileType());
  }

  adjustLineIndent(containingFile, textRange);
}
/**
 * Adjusts the indent of the line containing {@code offset} in {@code file}.
 * Postponed (postprocess) formatting is disabled for the duration of the operation.
 *
 * @return the resulting offset after the indent adjustment
 */
@Override
public int adjustLineIndent(@NotNull final PsiFile file, final int offset) throws IncorrectOperationException {
  return PostprocessReformattingAspect.getInstance(file.getProject()).disablePostprocessFormattingInside(new Computable<Integer>() {
    @Override
    public Integer compute() {
      return doAdjustLineIndentByOffset(file, offset);
    }
  });
}
/**
 * Finds the PSI element at {@code offset} for which a formatting model builder is available.
 * When the deepest element has no formatter and the file language is composite, the lookup is
 * retried through the view provider for the file's main language.
 */
@Nullable
static PsiElement findElementInTreeWithFormatterEnabled(final PsiFile file, final int offset) {
  final PsiElement leaf = file.findElementAt(offset);
  if (leaf != null && LanguageFormatting.INSTANCE.forContext(leaf) != null) {
    return leaf;
  }

  final Language fileLanguage = file.getLanguage();
  return fileLanguage instanceof CompositeLanguage
         ? file.getViewProvider().findElementAt(offset, fileLanguage)
         : leaf;
}
/**
 * Document-based overload of {@link #adjustLineIndent(PsiFile, int)}: commits the document,
 * then adjusts the indent of the line containing {@code offset} in the corresponding PSI file.
 */
@Override
public int adjustLineIndent(@NotNull final Document document, final int offset) {
  return PostprocessReformattingAspect.getInstance(getProject()).disablePostprocessFormattingInside(new Computable<Integer>() {
    @Override
    public Integer compute() {
      final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
      documentManager.commitDocument(document);
      PsiFile file = documentManager.getPsiFile(document);
      // No PSI backs this document — nothing to adjust, return the offset unchanged.
      if (file == null) return offset;
      return doAdjustLineIndentByOffset(file, offset);
    }
  });
}
// Performs the actual indent adjustment for the adjustLineIndent() overloads via
// CodeStyleManagerRunnable, which builds the formatting model and handles injected documents.
private int doAdjustLineIndentByOffset(@NotNull PsiFile file, int offset) {
  return new CodeStyleManagerRunnable<Integer>(this, FormattingMode.ADJUST_INDENT) {
    @Override
    protected Integer doPerform(int offset, TextRange range) {
      return FormatterEx.getInstanceEx().adjustLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange);
    }

    @Override
    protected Integer computeValueInsidePlainComment(PsiFile file, int offset, Integer defaultValue) {
      // Inside a plain comment just skip the leading spaces/tabs instead of reformatting.
      return CharArrayUtil.shiftForward(file.getViewProvider().getContents(), offset, " \t");
    }

    @Override
    protected Integer adjustResultForInjected(Integer result, DocumentWindow documentWindow) {
      // Map the host-document result back into injected-document coordinates.
      return documentWindow.hostToInjected(result);
    }
  }.perform(file, offset, null, offset);
}
/**
 * Adjusts line indents for all lines intersecting {@code rangeToAdjust} in {@code file}.
 */
@Override
public void adjustLineIndent(@NotNull PsiFile file, TextRange rangeToAdjust) throws IncorrectOperationException {
  new CodeStyleManagerRunnable<Object>(this, FormattingMode.ADJUST_INDENT) {
    @Override
    protected Object doPerform(int offset, TextRange range) {
      FormatterEx.getInstanceEx().adjustLineIndentsForRange(myModel, mySettings, myIndentOptions, range);
      return null;
    }
  }.perform(file, -1, rangeToAdjust, null);
}
/**
 * Computes the indent string the line at {@code offset} should have,
 * or {@code null} if it cannot be determined.
 */
@Override
@Nullable
public String getLineIndent(@NotNull PsiFile file, int offset) {
  return new CodeStyleManagerRunnable<String>(this, FormattingMode.ADJUST_INDENT) {
    @Override
    protected boolean useDocumentBaseFormattingModel() {
      // A PSI-based model is needed to compute (rather than apply) the indent.
      return false;
    }

    @Override
    protected String doPerform(int offset, TextRange range) {
      return FormatterEx.getInstanceEx().getLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange);
    }
  }.perform(file, offset, null, null);
}

/**
 * Document-based overload of {@link #getLineIndent(PsiFile, int)};
 * returns an empty string when the document has no PSI file.
 */
@Override
@Nullable
public String getLineIndent(@NotNull Document document, int offset) {
  PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
  if (file == null) return "";
  return getLineIndent(file, offset);
}
/**
 * Tells whether the line containing {@code offset} should be auto-indented: the text before the
 * offset on that line must be blank, and the first token at or after the offset must be a real
 * token — not white space, plain text, or a comment deliberately pinned to the first column.
 */
@Override
public boolean isLineToBeIndented(@NotNull PsiFile file, int offset) {
  if (!SourceTreeToPsiMap.hasTreeElement(file)) {
    return false;
  }
  final CharSequence chars = file.getViewProvider().getContents();

  // Everything between the start of the line and the offset must be spaces/tabs.
  final int start = CharArrayUtil.shiftBackward(chars, offset - 1, " \t");
  if (start > 0) {
    final char before = chars.charAt(start);
    if (before != '\n' && before != '\r') {
      return false;
    }
  }

  // Locate the first non-blank character at or after the offset.
  final int end = CharArrayUtil.shiftForward(chars, offset, " \t");
  if (end >= chars.length()) {
    return false;
  }

  final ASTNode element = SourceTreeToPsiMap.psiElementToTree(findElementInTreeWithFormatterEnabled(file, end));
  if (element == null
      || element.getElementType() == TokenType.WHITE_SPACE
      || element.getElementType() == PlainTextTokenTypes.PLAIN_TEXT) {
    return false;
  }

  // A comment sitting at the first column may be intentionally un-indented.
  if (getSettings().KEEP_FIRST_COLUMN_COMMENT
      && isCommentToken(element)
      && IndentHelper.getInstance().getIndent(myProject, file.getFileType(), element, true) == 0) {
    return false;
  }

  return true;
}
/**
 * Tells whether the given node is a line or block comment token of its language.
 */
private static boolean isCommentToken(final ASTNode element) {
  final Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(element.getElementType().getLanguage());
  if (!(commenter instanceof CodeDocumentationAwareCommenter)) {
    return false;
  }
  final CodeDocumentationAwareCommenter docCommenter = (CodeDocumentationAwareCommenter)commenter;
  return element.getElementType() == docCommenter.getBlockCommentTokenType()
         || element.getElementType() == docCommenter.getLineCommentTokenType();
}

/** Tells whether {@code c} is a space, tab or line feed. */
private static boolean isWhiteSpaceSymbol(char c) {
  switch (c) {
    case ' ':
    case '\t':
    case '\n':
      return true;
    default:
      return false;
  }
}
/**
 * Inserts dummy marker text at {@code offset} when the target line consists of white space only,
 * so the formatter preserves that line's white space instead of trimming it (useful e.g. for live
 * templates whose $END$ marker sits on a blank line).
 * <p/>
 * A PSI-based insertion is tried first — a dedicated {@link TokenType#NEW_LINE_INDENT} element that
 * may be treated specially during formatting; when that fails (the target language may consider
 * symbols other than spaces/tabs/line feeds to be white space), the document is processed as a
 * plain character sequence. As a side effect, {@link RangeMarker range markers} registered at the
 * given offset are expanded to the whole surrounding white space region, so a range marker used to
 * define a formatting range covers the reformatted white space completely.
 *
 * @param file     target PSI file
 * @param document target document
 * @param offset   offset that defines the end boundary of the target line text fragment
 *                 (the start boundary is the line's first symbol)
 * @return text range of the newly inserted dummy text, or {@code null} if nothing was inserted
 * @throws IncorrectOperationException if the given file is read-only
 */
@Nullable
public static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, @NotNull Document document, int offset)
  throws IncorrectOperationException
{
  final TextRange psiBased = insertNewLineIndentMarker(file, offset);
  return psiBased != null ? psiBased : insertNewLineIndentMarker(document, offset);
}
// PSI-based part of insertNewLineIndentMarker(): splits the white space element at the offset and
// inserts a dedicated NEW_LINE_INDENT leaf after the first half. Returns null when the offset does
// not sit inside a blank-line white space element.
@Nullable
private static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, int offset) throws IncorrectOperationException {
  CheckUtil.checkWritable(file);
  final Pair<PsiElement, CharTable> pair = doFindWhiteSpaceNode(file, offset);
  PsiElement element = pair.first;
  if (element == null) {
    return null;
  }

  ASTNode node = SourceTreeToPsiMap.psiElementToTree(element);
  if (node == null) {
    return null;
  }

  ASTNode parent = node.getTreeParent();
  int elementStart = element.getTextRange().getStartOffset();
  int rangeShift = 0;
  if (element.getContainingFile() != null) {
    // Map injected element offset to the real file offset.
    rangeShift = InjectedLanguageManager.getInstance(file.getProject()).injectedToHost(element, elementStart) - elementStart;
    elementStart += rangeShift;
  }
  if (elementStart > offset) {
    return null;
  }

  // We don't want to insert a marker if target line is not blank (doesn't consist from white space symbols only).
  if (offset == elementStart) {
    // NOTE(review): this loop only ever exits via 'return null' (either the start of the file is
    // reached or a non-white-space leaf is found), so no marker is ever inserted when
    // offset == elementStart — presumably intentional; confirm before changing.
    for (ASTNode prev = TreeUtil.prevLeaf(node); ; prev = TreeUtil.prevLeaf(prev)) {
      if (prev == null) {
        return null;
      }
      if (prev.getTextRange().isEmpty()) {
        continue;
      }
      if (prev.getElementType() != TokenType.WHITE_SPACE) {
        return null;
      }
    }
  }

  CharTable charTable = pair.second;
  ASTNode marker;

  // The thing is that we have a sub-system that monitors tree changes and marks newly generated elements for postponed
  // formatting (PostprocessReformattingAspect). In case of injected context that results in marking whole injected region
  // in case its sub-range is changed.
  //
  // We want to avoid that here, so, temporarily suppress that functionality.
  CodeEditUtil.setAllowSuspendNodesReformatting(false);
  try {
    ASTNode space1 = splitSpaceElement((TreeElement)element, offset - elementStart, charTable);
    marker = Factory.createSingleLeafElement(TokenType.NEW_LINE_INDENT, DUMMY_IDENTIFIER, charTable, file.getManager());
    setSequentialProcessingAllowed(false);
    parent.addChild(marker, space1.getTreeNext());
  }
  finally {
    CodeEditUtil.setAllowSuspendNodesReformatting(true);
  }

  PsiElement psiElement = SourceTreeToPsiMap.treeElementToPsi(marker);
  // Shift back into host-file coordinates when the element came from an injected fragment.
  return psiElement == null ? null : psiElement.getTextRange().shiftRight(rangeShift);
}
// Document-based part of insertNewLineIndentMarker(): operates on the raw character sequence when
// no suitable PSI white space element was found. Returns null unless the whole line around the
// offset is blank (spaces/tabs/line feeds only).
@Nullable
private static TextRange insertNewLineIndentMarker(@NotNull Document document, final int offset) {
  CharSequence text = document.getCharsSequence();
  if (offset < 0 || offset >= text.length() || !isWhiteSpaceSymbol(text.charAt(offset))) {
    return null;
  }

  // Scan backwards to the start of the white space run (stop at the preceding line feed).
  int start = offset;
  for (int i = offset - 1; i >= 0; i--) {
    char c = text.charAt(i);
    // We don't want to insert a marker if target line is not blank (doesn't consist from white space symbols only).
    if (c == '\n') {
      break;
    }
    if (!isWhiteSpaceSymbol(c)) {
      return null;
    }
    start = i;
  }

  // Scan forwards to the end of the white space run.
  int end = offset;
  for (; end < text.length(); end++) {
    if (!isWhiteSpaceSymbol(text.charAt(end))) {
      break;
    }
  }

  StringBuilder buffer = new StringBuilder();
  buffer.append(text.subSequence(start, end));

  // Modify the document in order to expand range markers pointing to the given offset to the whole white space range.
  document.deleteString(start, end);
  document.insertString(start, buffer);

  setSequentialProcessingAllowed(false);
  document.insertString(offset, DUMMY_IDENTIFIER);
  return new TextRange(offset, offset + DUMMY_IDENTIFIER.length());
}
/**
 * Returns the white space element at {@code offset} within {@code file},
 * or {@code null} when the offset does not point into a white space element.
 *
 * @param file   target file
 * @param offset offset that might point to a white space element within the file
 */
@Nullable
public static PsiElement findWhiteSpaceNode(@NotNull PsiFile file, int offset) {
  return doFindWhiteSpaceNode(file, offset).first;
}

/**
 * Locates the white space element at {@code offset} (checking injected fragments first) together
 * with the file's char table. Either component of the returned pair may be {@code null}.
 */
@NotNull
private static Pair<PsiElement, CharTable> doFindWhiteSpaceNode(@NotNull PsiFile file, int offset) {
  final ASTNode fileNode = SourceTreeToPsiMap.psiElementToTree(file);
  if (!(fileNode instanceof FileElement)) {
    return new Pair<PsiElement, CharTable>(null, null);
  }
  final CharTable charTable = ((FileElement)fileNode).getCharTable();

  PsiElement candidate = InjectedLanguageUtil.findInjectedElementNoCommit(file, offset);
  if (candidate == null) {
    candidate = findElementInTreeWithFormatterEnabled(file, offset);
  }
  if (candidate != null) {
    final ASTNode node = candidate.getNode();
    if (node != null && node.getElementType() == TokenType.WHITE_SPACE) {
      return new Pair<PsiElement, CharTable>(candidate, charTable);
    }
  }
  return new Pair<PsiElement, CharTable>(null, charTable);
}
/**
 * Builds an {@link Indent} descriptor (whole indent levels plus remaining spaces)
 * for the white space prefix of {@code text}.
 */
@Override
public Indent getIndent(String text, FileType fileType) {
  final int totalIndent = IndentHelperImpl.getIndent(myProject, fileType, text, true);
  final int indentLevel = totalIndent / IndentHelperImpl.INDENT_FACTOR;
  final int spaceCount = totalIndent - indentLevel * IndentHelperImpl.INDENT_FACTOR;
  return new IndentImpl(getSettings(), indentLevel, spaceCount, fileType);
}
/**
 * Produces the white space string corresponding to the given {@link Indent} descriptor,
 * first normalizing negative indent levels / space counts against each other.
 */
@Override
public String fillIndent(Indent indent, FileType fileType) {
  IndentImpl indent1 = (IndentImpl)indent;
  int indentLevel = indent1.getIndentLevel();
  int spaceCount = indent1.getSpaceCount();
  if (indentLevel < 0) {
    // Fold the negative level into the space count, clamping at zero.
    spaceCount += indentLevel * getSettings().getIndentSize(fileType);
    indentLevel = 0;
    if (spaceCount < 0) {
      spaceCount = 0;
    }
  }
  else {
    if (spaceCount < 0) {
      // Borrow whole indent levels (rounded up) to compensate the negative space count.
      int v = (-spaceCount + getSettings().getIndentSize(fileType) - 1) / getSettings().getIndentSize(fileType);
      indentLevel -= v;
      spaceCount += v * getSettings().getIndentSize(fileType);
      if (indentLevel < 0) {
        indentLevel = 0;
      }
    }
  }
  return IndentHelperImpl.fillIndent(myProject, fileType, indentLevel * IndentHelperImpl.INDENT_FACTOR + spaceCount);
}
/** Returns an indent descriptor with zero indent levels and zero spaces. */
@Override
public Indent zeroIndent() {
  return new IndentImpl(getSettings(), 0, 0, null);
}

// Splits the given white space leaf at 'offset' into two white space leaves
// (replacing the original) and returns the first one.
private static ASTNode splitSpaceElement(TreeElement space, int offset, CharTable charTable) {
  LOG.assertTrue(space.getElementType() == TokenType.WHITE_SPACE);
  CharSequence chars = space.getChars();
  LeafElement space1 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, 0, offset, charTable, SharedImplUtil.getManagerByTree(space));
  LeafElement space2 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, offset, chars.length(), charTable, SharedImplUtil.getManagerByTree(space));
  ASTNode parent = space.getTreeParent();
  parent.replaceChild(space, space1);
  parent.addChild(space2, space1.getTreeNext());
  return space1;
}

// Shortcut for the project's current code style settings.
@NotNull
private CodeStyleSettings getSettings() {
  return CodeStyleSettingsManager.getSettings(myProject);
}
/** Tells whether sequential formatter processing is currently allowed on this thread. */
@Override
public boolean isSequentialProcessingAllowed() {
  return SEQUENTIAL_PROCESSING_ALLOWED.get().isAllowed();
}

/**
 * Allows to define if {@link #isSequentialProcessingAllowed() sequential processing} should be allowed.
 * <p/>
 * Current approach is not allow to stop sequential processing for more than predefine amount of time (couple of seconds).
 * That means that call to this method with <code>'true'</code> argument is not mandatory for successful processing even
 * if this method is called with <code>'false'</code> argument before.
 *
 * @param allowed flag that defines if {@link #isSequentialProcessingAllowed() sequential processing} should be allowed
 */
public static void setSequentialProcessingAllowed(boolean allowed) {
  ProcessingUnderProgressInfo info = SEQUENTIAL_PROCESSING_ALLOWED.get();
  if (allowed) {
    info.decrement();
  }
  else {
    info.increment();
  }
}
/**
 * Re-entrant counter with an expiration deadline: sequential processing is considered disallowed
 * while the counter is positive and the deadline (refreshed on every increment) has not passed.
 * The deadline guards against a client that disables processing and forgets to re-enable it.
 */
private static class ProcessingUnderProgressInfo {

  private static final long DURATION_TIME = TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS);

  private int myCount;
  private long myEndTime;

  public void increment() {
    // Reset a stale counter whose deadline has already expired.
    if (myCount > 0 && System.currentTimeMillis() > myEndTime) {
      myCount = 0;
    }
    myCount++;
    myEndTime = System.currentTimeMillis() + DURATION_TIME;
  }

  public void decrement() {
    if (myCount <= 0) {
      return;
    }
    myCount--;
  }

  public boolean isAllowed() {
    return myCount <= 0 || System.currentTimeMillis() >= myEndTime;
  }
}
/**
 * Runs {@code r} with the formatter disabled (delegates to the {@link Computable}-based overload).
 */
@Override
public void performActionWithFormatterDisabled(final Runnable r) {
  final Computable<Object> adapter = new Computable<Object>() {
    @Override
    public Object compute() {
      r.run();
      return null;
    }
  };
  performActionWithFormatterDisabled(adapter);
}
/**
 * Runs the given throwable action with the formatter disabled and rethrows whatever it threw.
 */
@Override
public <T extends Throwable> void performActionWithFormatterDisabled(final ThrowableRunnable<T> r) throws T {
  // Capture the throwable inside the Computable and rethrow it after the wrapper returns.
  final Throwable[] throwable = new Throwable[1];

  performActionWithFormatterDisabled(new Computable<Object>() {
    @Override
    public Object compute() {
      try {
        r.run();
      }
      catch (Throwable t) {
        throwable[0] = t;
      }
      return null;
    }
  });

  if (throwable[0] != null) {
    // Unchecked cast is erased at runtime; 'r' is declared to throw only T (unchecked throwables pass through anyway).
    //noinspection unchecked
    throw (T)throwable[0];
  }
}

/**
 * Runs the given computation with formatting disabled (both the core formatter and
 * postponed post-process formatting) and returns its result.
 */
@Override
public <T> T performActionWithFormatterDisabled(final Computable<T> r) {
  return ((FormatterImpl)FormatterEx.getInstance()).runWithFormattingDisabled(new Computable<T>() {
    @Override
    public T compute() {
      final PostprocessReformattingAspect component = PostprocessReformattingAspect.getInstance(getProject());
      return component.disablePostprocessFormattingInside(r);
    }
  });
}
/**
 * Value holder describing one range scheduled for reformatting: smart pointers to the range's
 * boundary elements (so they survive the formatting pass) plus flags telling whether the range
 * is anchored to the very start / end of the file.
 */
private static class RangeFormatInfo{
  public final SmartPsiElementPointer startPointer;  // may be null when no boundary element was found
  public final SmartPsiElementPointer endPointer;    // may be null when no boundary element was found
  public final boolean fromStart;
  public final boolean toEnd;

  RangeFormatInfo(@Nullable SmartPsiElementPointer startPointer,
                  @Nullable SmartPsiElementPointer endPointer,
                  boolean fromStart,
                  boolean toEnd)
  {
    this.startPointer = startPointer;
    this.endPointer = endPointer;
    this.fromStart = fromStart;
    this.toEnd = toEnd;
  }
}
}
| platform/lang-impl/src/com/intellij/psi/impl/source/codeStyle/CodeStyleManagerImpl.java | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.codeStyle;
import com.intellij.formatting.*;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.lang.*;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.Indent;
import com.intellij.psi.impl.CheckUtil;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.util.CharTable;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class CodeStyleManagerImpl extends CodeStyleManager {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.codeStyle.CodeStyleManagerImpl");
private static final ThreadLocal<ProcessingUnderProgressInfo> SEQUENTIAL_PROCESSING_ALLOWED
= new ThreadLocal<ProcessingUnderProgressInfo>()
{
@Override
protected ProcessingUnderProgressInfo initialValue() {
return new ProcessingUnderProgressInfo();
}
};
private final Project myProject;
@NonNls private static final String DUMMY_IDENTIFIER = "xxx";
public CodeStyleManagerImpl(Project project) {
myProject = project;
}
@Override
@NotNull
public Project getProject() {
return myProject;
}
@Override
@NotNull
public PsiElement reformat(@NotNull PsiElement element) throws IncorrectOperationException {
return reformat(element, false);
}
@Override
@NotNull
public PsiElement reformat(@NotNull PsiElement element, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException {
CheckUtil.checkWritable(element);
if( !SourceTreeToPsiMap.hasTreeElement( element ) )
{
return element;
}
ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(element);
final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(new CodeFormatterFacade(getSettings()).processElement(treeElement));
if (!canChangeWhiteSpacesOnly) {
return postProcessElement(formatted);
} else {
return formatted;
}
}
private PsiElement postProcessElement(@NotNull final PsiElement formatted) {
PsiElement result = formatted;
for (PostFormatProcessor postFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) {
result = postFormatProcessor.processElement(result, getSettings());
}
return result;
}
private void postProcessText(@NotNull final PsiFile file, @NotNull final TextRange textRange) {
TextRange currentRange = textRange;
for (final PostFormatProcessor myPostFormatProcessor : Extensions.getExtensions(PostFormatProcessor.EP_NAME)) {
currentRange = myPostFormatProcessor.processText(file, currentRange, getSettings());
}
}
@Override
public PsiElement reformatRange(@NotNull PsiElement element,
int startOffset,
int endOffset,
boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException {
return reformatRangeImpl(element, startOffset, endOffset, canChangeWhiteSpacesOnly);
}
@Override
public PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset)
throws IncorrectOperationException {
return reformatRangeImpl(element, startOffset, endOffset, false);
}
private static void transformAllChildren(final ASTNode file) {
((TreeElement)file).acceptTree(new RecursiveTreeElementWalkingVisitor() {
});
}
@Override
public void reformatText(@NotNull PsiFile file, int startOffset, int endOffset) throws IncorrectOperationException {
reformatText(file, Collections.singleton(new TextRange(startOffset, endOffset)));
}
@Override
public void reformatText(@NotNull PsiFile file, @NotNull Collection<TextRange> ranges) throws IncorrectOperationException {
if (ranges.isEmpty()) {
return;
}
ApplicationManager.getApplication().assertWriteAccessAllowed();
PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
CheckUtil.checkWritable(file);
if (!SourceTreeToPsiMap.hasTreeElement(file)) {
return;
}
ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(file);
transformAllChildren(treeElement);
final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(getSettings());
LOG.assertTrue(file.isValid());
Editor editor = PsiUtilBase.findEditor(file);
// There is a possible case that cursor is located at the end of the line that contains only white spaces. For example:
// public void foo() {
// <caret>
// }
// Formatter removes such white spaces, i.e. keeps only line feed symbol. But we want to preserve caret position then.
// So, we check if it should be preserved and restore it after formatting if necessary
int visualColumnToRestore = -1;
if (editor != null) {
Document document = editor.getDocument();
int caretOffset = editor.getCaretModel().getOffset();
caretOffset = Math.max(Math.min(caretOffset, document.getTextLength() - 1), 0);
CharSequence text = document.getCharsSequence();
int caretLine = document.getLineNumber(caretOffset);
int lineStartOffset = document.getLineStartOffset(caretLine);
int lineEndOffset = document.getLineEndOffset(caretLine);
boolean fixCaretPosition = true;
for (int i = lineStartOffset; i < lineEndOffset; i++) {
char c = text.charAt(i);
if (c != ' ' && c != '\t' && c != '\n') {
fixCaretPosition = false;
break;
}
}
if (fixCaretPosition) {
visualColumnToRestore = editor.getCaretModel().getVisualPosition().column;
}
}
final SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(getProject());
List<RangeFormatInfo> infos = new ArrayList<RangeFormatInfo>();
for (TextRange range : ranges) {
final PsiElement start = findElementInTreeWithFormatterEnabled(file, range.getStartOffset());
final PsiElement end = findElementInTreeWithFormatterEnabled(file, range.getEndOffset());
if (start != null && !start.isValid()) {
LOG.error("start=" + start + "; file=" + file);
}
if (end != null && !end.isValid()) {
LOG.error("end=" + start + "; end=" + file);
}
boolean formatFromStart = range.getStartOffset() == 0;
boolean formatToEnd = range.getEndOffset() == file.getTextLength();
infos.add(new RangeFormatInfo(
start == null ? null : smartPointerManager.createSmartPsiElementPointer(start),
end == null ? null : smartPointerManager.createSmartPsiElementPointer(end),
formatFromStart,
formatToEnd
));
}
FormatTextRanges formatRanges = new FormatTextRanges();
for (TextRange range : ranges) {
formatRanges.add(range, true);
}
codeFormatter.processText(file, formatRanges, true);
for (RangeFormatInfo info : infos) {
final PsiElement startElement = info.startPointer == null ? null : info.startPointer.getElement();
final PsiElement endElement = info.endPointer == null ? null : info.endPointer.getElement();
if ((startElement != null || info.fromStart) && (endElement != null || info.toEnd)) {
postProcessText(file, new TextRange(info.fromStart ? 0 : startElement.getTextRange().getStartOffset(),
info.toEnd ? file.getTextLength() : endElement.getTextRange().getEndOffset()));
}
}
if (editor == null) {
return;
}
if (visualColumnToRestore < 0) {
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
return;
}
CaretModel caretModel = editor.getCaretModel();
VisualPosition position = caretModel.getVisualPosition();
if (visualColumnToRestore != position.column) {
caretModel.moveToVisualPosition(new VisualPosition(position.line, visualColumnToRestore));
}
}
private PsiElement reformatRangeImpl(final PsiElement element,
                                     final int startOffset,
                                     final int endOffset,
                                     boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException {
  // Pre-conditions: the element must be valid and writable before reformatting.
  LOG.assertTrue(element.isValid());
  CheckUtil.checkWritable(element);

  // Elements without a backing source tree cannot be reformatted; return them untouched.
  if (!SourceTreeToPsiMap.hasTreeElement(element)) {
    return element;
  }

  final ASTNode treeElement = SourceTreeToPsiMap.psiElementToTree(element);
  final CodeFormatterFacade formatterFacade = new CodeFormatterFacade(getSettings());
  final PsiElement formatted = SourceTreeToPsiMap.treeElementToPsi(formatterFacade.processRange(treeElement, startOffset, endOffset));

  // Post-processing is skipped when the caller only allows whitespace changes.
  if (canChangeWhiteSpacesOnly) {
    return formatted;
  }
  return postProcessElement(formatted);
}
@Override
public void reformatNewlyAddedElement(@NotNull final ASTNode parent, @NotNull final ASTNode addedElement) throws IncorrectOperationException {
  // Reformats the region around an element that was just inserted into the tree.
  LOG.assertTrue(addedElement.getTreeParent() == parent, "addedElement must be added to parent");

  final PsiElement psiElement = parent.getPsi();
  PsiFile containingFile = psiElement.getContainingFile();
  final FileViewProvider fileViewProvider = containingFile.getViewProvider();
  // For multi-language (templated) files, format against the base-language PSI tree.
  if (fileViewProvider instanceof MultiplePsiFilesPerDocumentFileViewProvider) {
    containingFile = fileViewProvider.getPsi(fileViewProvider.getBaseLanguage());
  }

  TextRange textRange = addedElement.getTextRange();
  final Document document = fileViewProvider.getDocument();
  // If the element lives in injected code, translate both the file and the range
  // into host-file coordinates before formatting.
  if (document instanceof DocumentWindow) {
    containingFile = InjectedLanguageManager.getInstance(containingFile.getProject()).getTopLevelFile(containingFile);
    textRange = ((DocumentWindow)document).injectedToHost(textRange);
  }

  // Only languages that provide a formatting model get a full reformat around the range.
  final FormattingModelBuilder builder = LanguageFormatting.INSTANCE.forContext(containingFile);
  if (builder != null) {
    final FormattingModel model = CoreFormatterUtil.buildModel(builder, containingFile, getSettings(), FormattingMode.REFORMAT);
    FormatterEx.getInstanceEx().formatAroundRange(model, getSettings(), textRange, containingFile.getFileType());
  }

  // Always fix up indentation, even when no formatting model is available.
  adjustLineIndent(containingFile, textRange);
}
@Override
public int adjustLineIndent(@NotNull final PsiFile file, final int offset) throws IncorrectOperationException {
  // Run the indent adjustment with postponed formatting suppressed, so the
  // postprocess aspect does not re-format the region being adjusted.
  final Computable<Integer> adjustTask = new Computable<Integer>() {
    @Override
    public Integer compute() {
      return doAdjustLineIndentByOffset(file, offset);
    }
  };
  return PostprocessReformattingAspect.getInstance(file.getProject()).disablePostprocessFormattingInside(adjustTask);
}
@Nullable
static PsiElement findElementInTreeWithFormatterEnabled(final PsiFile file, final int offset) {
  // Prefer the deepest element at the offset when its language has formatting support.
  final PsiElement leaf = file.findElementAt(offset);
  if (leaf != null && LanguageFormatting.INSTANCE.forContext(leaf) != null) {
    return leaf;
  }
  // For composite languages fall back to the element from the base-language tree.
  final Language fileLang = file.getLanguage();
  if (fileLang instanceof CompositeLanguage) {
    return file.getViewProvider().findElementAt(offset, fileLang);
  }
  return leaf;
}
@Override
public int adjustLineIndent(@NotNull final Document document, final int offset) {
  // Document-based variant: commit pending changes first so PSI matches the text,
  // then delegate to the PSI-based adjustment. Runs with postponed formatting off.
  final Computable<Integer> adjustTask = new Computable<Integer>() {
    @Override
    public Integer compute() {
      final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
      documentManager.commitDocument(document);
      final PsiFile psiFile = documentManager.getPsiFile(document);
      // Without a PSI file there is nothing to adjust; keep the caller's offset.
      if (psiFile == null) {
        return offset;
      }
      return doAdjustLineIndentByOffset(psiFile, offset);
    }
  };
  return PostprocessReformattingAspect.getInstance(getProject()).disablePostprocessFormattingInside(adjustTask);
}
private int doAdjustLineIndentByOffset(@NotNull PsiFile file, int offset) {
  // Delegates to CodeStyleManagerRunnable, which builds the formatting model and
  // handles plain-text files and injected fragments uniformly.
  return new CodeStyleManagerRunnable<Integer>(this, FormattingMode.ADJUST_INDENT) {
    @Override
    protected Integer doPerform(int offset, TextRange range) {
      return FormatterEx.getInstanceEx().adjustLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange);
    }

    @Override
    protected Integer computeValueInsidePlainComment(PsiFile file, int offset, Integer defaultValue) {
      // Inside a plain comment just skip past leading spaces/tabs instead of formatting.
      return CharArrayUtil.shiftForward(file.getViewProvider().getContents(), offset, " \t");
    }

    @Override
    protected Integer adjustResultForInjected(Integer result, DocumentWindow documentWindow) {
      // Translate the host-document offset back into injected-fragment coordinates.
      return documentWindow.hostToInjected(result);
    }
  }.perform(file, offset, null, offset);
}
@Override
public void adjustLineIndent(@NotNull PsiFile file, TextRange rangeToAdjust) throws IncorrectOperationException {
  // Adjust indentation for every line in the given range. There is no single
  // result value, hence the Object type parameter and the null return.
  final CodeStyleManagerRunnable<Object> runnable = new CodeStyleManagerRunnable<Object>(this, FormattingMode.ADJUST_INDENT) {
    @Override
    protected Object doPerform(int offset, TextRange range) {
      FormatterEx.getInstanceEx().adjustLineIndentsForRange(myModel, mySettings, myIndentOptions, range);
      return null;
    }
  };
  runnable.perform(file, -1, rangeToAdjust, null);
}
@Override
@Nullable
public String getLineIndent(@NotNull PsiFile file, int offset) {
  // Computes the indent string for the line containing the offset without
  // modifying the document.
  return new CodeStyleManagerRunnable<String>(this, FormattingMode.ADJUST_INDENT) {
    @Override
    protected boolean useDocumentBaseFormattingModel() {
      // Work on the PSI-based model: the document may not be committed at this point.
      return false;
    }

    @Override
    protected String doPerform(int offset, TextRange range) {
      return FormatterEx.getInstanceEx().getLineIndent(myModel, mySettings, myIndentOptions, offset, mySignificantRange);
    }
  }.perform(file, offset, null, null);
}
@Override
@Nullable
public String getLineIndent(@NotNull Document document, int offset) {
  // Resolve the PSI file behind the document and delegate. An empty string
  // (not null) is returned when no PSI file exists for the document.
  final PsiFile psiFile = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
  return psiFile == null ? "" : getLineIndent(psiFile, offset);
}
@Override
public boolean isLineToBeIndented(@NotNull PsiFile file, int offset) {
  // Decides whether the line containing 'offset' should receive automatic indentation.
  if (!SourceTreeToPsiMap.hasTreeElement(file)) {
    return false;
  }
  CharSequence chars = file.getViewProvider().getContents();
  // Only offsets sitting in the leading whitespace of a line qualify: scan back
  // over spaces/tabs and require a line break (or file start) right before them.
  int start = CharArrayUtil.shiftBackward(chars, offset - 1, " \t");
  if (start > 0 && chars.charAt(start) != '\n' && chars.charAt(start) != '\r') {
    return false;
  }
  // Find the first non-blank character at or after the offset; nothing to indent at EOF.
  int end = CharArrayUtil.shiftForward(chars, offset, " \t");
  if (end >= chars.length()) {
    return false;
  }
  ASTNode element = SourceTreeToPsiMap.psiElementToTree(findElementInTreeWithFormatterEnabled(file, end));
  if (element == null) {
    return false;
  }
  // Whitespace and plain-text tokens never trigger indentation.
  if (element.getElementType() == TokenType.WHITE_SPACE) {
    return false;
  }
  if (element.getElementType() == PlainTextTokenTypes.PLAIN_TEXT) {
    return false;
  }
  /*
  if( element.getElementType() instanceof IJspElementType )
  {
  return false;
  }
  */
  // Comments deliberately pinned to the first column are left unindented
  // when the KEEP_FIRST_COLUMN_COMMENT setting is on.
  if (getSettings().KEEP_FIRST_COLUMN_COMMENT && isCommentToken(element)) {
    if (IndentHelper.getInstance().getIndent(myProject, file.getFileType(), element, true) == 0) {
      return false;
    }
  }
  return true;
}
private static boolean isCommentToken(final ASTNode element) {
  // True when the node's token type is the line- or block-comment token of its language.
  final Language language = element.getElementType().getLanguage();
  final Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(language);
  if (!(commenter instanceof CodeDocumentationAwareCommenter)) {
    return false;
  }
  final CodeDocumentationAwareCommenter docCommenter = (CodeDocumentationAwareCommenter)commenter;
  return element.getElementType() == docCommenter.getBlockCommentTokenType()
         || element.getElementType() == docCommenter.getLineCommentTokenType();
}
private static boolean isWhiteSpaceSymbol(char c) {
  // White space here means plain space, tab or line feed; '\r' is intentionally
  // excluded since IDE documents use '\n'-only line separators.
  switch (c) {
    case ' ':
    case '\t':
    case '\n':
      return true;
    default:
      return false;
  }
}
/**
* Formatter trims line that contains white spaces symbols only, however, there is a possible case that we want
* to preserve them for particular line (e.g. for live template that defines blank line that contains $END$ marker).
* <p/>
* Current approach is to do the following:
* <pre>
* <ol>
* <li>Insert dummy text at the end of the blank line which white space symbols should be preserved;</li>
* <li>Perform formatting;</li>
* <li>Remove dummy text;</li>
* </ol>
* </pre>
* <p/>
* This method inserts that dummy text if necessary (if target line contains white space symbols only).
* <p/>
* Please note that it tries to do that via PSI at first (checks if given offset points to
* {@link TokenType#WHITE_SPACE white space element} and inserts dummy text as dedicated element if necessary) and,
* in case of the negative answer, tries to perform the examination considering document just as a sequence of characters
* and assuming that white space symbols are white spaces, tabulations and line feeds. The rationale for such an approach is:
* <pre>
* <ul>
* <li>
* there is a possible case that target language considers symbols over than white spaces, tabulations and line feeds
* to be white spaces and the answer lays at PSI structure of the file;
* </li>
* <li>
* dummy text inserted during PSI-based processing has {@link TokenType#NEW_LINE_INDENT special type} that may be treated
* specifically during formatting;
* </li>
* </ul>
* </pre>
* <p/>
* <b>Note:</b> it's expected that the whole white space region that contains given offset is processed in a way that all
* {@link RangeMarker range markers} registered for the given offset are expanded to the whole white space region.
* E.g. there is a possible case that particular range marker serves for defining formatting range, hence, its start/end offsets
* are updated correspondingly after current method call and whole white space region is reformatted.
*
* @param file target PSI file
* @param document target document
* @param offset offset that defines end boundary of the target line text fragment (start boundary is the first line's symbol)
* @return text range that points to the newly inserted dummy text if any; <code>null</code> otherwise
* @throws IncorrectOperationException if given file is read-only
*/
@Nullable
public static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, @NotNull Document document, int offset)
  throws IncorrectOperationException
{
  // Try the PSI-based insertion first; fall back to the plain document-based
  // variant when the offset does not map onto a white space PSI element.
  final TextRange psiBasedRange = insertNewLineIndentMarker(file, offset);
  return psiBasedRange != null ? psiBasedRange : insertNewLineIndentMarker(document, offset);
}
@Nullable
private static TextRange insertNewLineIndentMarker(@NotNull PsiFile file, int offset) throws IncorrectOperationException {
  // PSI-based marker insertion: splits the white space leaf at the offset and
  // inserts a dummy NEW_LINE_INDENT leaf between the halves.
  CheckUtil.checkWritable(file);
  final Pair<PsiElement, CharTable> pair = doFindWhiteSpaceNode(file, offset);
  PsiElement element = pair.first;
  if (element == null) {
    return null;
  }
  ASTNode node = SourceTreeToPsiMap.psiElementToTree(element);
  if (node == null) {
    return null;
  }
  ASTNode parent = node.getTreeParent();
  int elementStart = element.getTextRange().getStartOffset();
  int rangeShift = 0;
  if (element.getContainingFile() != null) {
    // Map injected element offset to the real file offset.
    rangeShift = InjectedLanguageManager.getInstance(file.getProject()).injectedToHost(element, elementStart) - elementStart;
    elementStart += rangeShift;
  }
  if (elementStart > offset) {
    return null;
  }
  // We don't want to insert a marker if target line is not blank (doesn't consist from white space symbols only).
  if (offset == elementStart) {
    // NOTE(review): this loop has no terminating break — every path ends in
    // 'return null' (either a non-whitespace leaf or the file start is reached),
    // so offset == elementStart always yields null. Confirm this is intended.
    for (ASTNode prev = TreeUtil.prevLeaf(node); ; prev = TreeUtil.prevLeaf(prev)) {
      if (prev == null) {
        return null;
      }
      if (prev.getTextRange().isEmpty()) {
        continue;
      }
      if (prev.getElementType() != TokenType.WHITE_SPACE) {
        return null;
      }
    }
  }
  CharTable charTable = pair.second;
  ASTNode marker;
  // The thing is that we have a sub-system that monitors tree changes and marks newly generated elements for postponed
  // formatting (PostprocessReformattingAspect). In case of injected context that results in marking whole injected region
  // in case its sub-range is changed.
  //
  // We want to avoid that here, so, temporarily suppress that functionality.
  CodeEditUtil.setAllowSuspendNodesReformatting(false);
  try {
    // Split the white space at the offset and insert the dummy marker after the first half.
    ASTNode space1 = splitSpaceElement((TreeElement)element, offset - elementStart, charTable);
    marker = Factory.createSingleLeafElement(TokenType.NEW_LINE_INDENT, DUMMY_IDENTIFIER, charTable, file.getManager());
    setSequentialProcessingAllowed(false);
    parent.addChild(marker, space1.getTreeNext());
  }
  finally {
    CodeEditUtil.setAllowSuspendNodesReformatting(true);
  }
  PsiElement psiElement = SourceTreeToPsiMap.treeElementToPsi(marker);
  // Return the marker range in host-file coordinates (shifted for injected fragments).
  return psiElement == null ? null : psiElement.getTextRange().shiftRight(rangeShift);
}
@Nullable
private static TextRange insertNewLineIndentMarker(@NotNull Document document, final int offset) {
  // Character-based fallback used when no white space PSI element exists at the offset.
  CharSequence text = document.getCharsSequence();
  if (offset < 0 || offset >= text.length() || !isWhiteSpaceSymbol(text.charAt(offset))) {
    return null;
  }
  // Find the start of the blank region: walk back to the previous line feed,
  // bailing out if any non-whitespace character is found on the way.
  int start = offset;
  for (int i = offset - 1; i >= 0; i--) {
    char c = text.charAt(i);
    // We don't want to insert a marker if target line is not blank (doesn't consist from white space symbols only).
    if (c == '\n') {
      break;
    }
    if (!isWhiteSpaceSymbol(c)) {
      return null;
    }
    start = i;
  }
  // Find the end of the whitespace run containing the offset.
  int end = offset;
  for (; end < text.length(); end++) {
    if (!isWhiteSpaceSymbol(text.charAt(end))) {
      break;
    }
  }
  StringBuilder buffer = new StringBuilder();
  buffer.append(text.subSequence(start, end));
  // Modify the document in order to expand range markers pointing to the given offset to the whole white space range.
  document.deleteString(start, end);
  document.insertString(start, buffer);
  // Suppress sequential processing while the dummy text is present, then insert it.
  setSequentialProcessingAllowed(false);
  document.insertString(offset, DUMMY_IDENTIFIER);
  return new TextRange(offset, offset + DUMMY_IDENTIFIER.length());
}
/**
* Allows to check if given offset points to white space element within the given PSI file and return that white space
* element in the case of positive answer.
*
* @param file target file
* @param offset offset that might point to white space element within the given PSI file
* @return target white space element for the given offset within the given file (if any); <code>null</code> otherwise
*/
@Nullable
public static PsiElement findWhiteSpaceNode(@NotNull PsiFile file, int offset) {
  // Delegate to the internal lookup and discard the char-table half of the result.
  final Pair<PsiElement, CharTable> result = doFindWhiteSpaceNode(file, offset);
  return result.first;
}
@NotNull
private static Pair<PsiElement, CharTable> doFindWhiteSpaceNode(@NotNull PsiFile file, int offset) {
  // Returns the white space element at the offset (if any) plus the file's char table.
  ASTNode astNode = SourceTreeToPsiMap.psiElementToTree(file);
  if (!(astNode instanceof FileElement)) {
    return new Pair<PsiElement, CharTable>(null, null);
  }
  final CharTable charTable = ((FileElement)astNode).getCharTable();

  // Injected fragments take precedence over the top-level tree.
  PsiElement candidate = InjectedLanguageUtil.findInjectedElementNoCommit(file, offset);
  if (candidate == null) {
    candidate = findElementInTreeWithFormatterEnabled(file, offset);
  }
  if (candidate == null) {
    return new Pair<PsiElement, CharTable>(null, charTable);
  }

  // Only genuine white space tokens qualify; everything else yields a null element.
  final ASTNode node = candidate.getNode();
  if (node == null || node.getElementType() != TokenType.WHITE_SPACE) {
    return new Pair<PsiElement, CharTable>(null, charTable);
  }
  return new Pair<PsiElement, CharTable>(candidate, charTable);
}
@Override
public Indent getIndent(String text, FileType fileType) {
  // Decomposes the total indent of the given text into whole indent levels
  // plus a remainder of plain spaces.
  // (Fixes the misspelled local 'indenLevel' from the previous revision.)
  final int indent = IndentHelperImpl.getIndent(myProject, fileType, text, true);
  final int indentLevel = indent / IndentHelperImpl.INDENT_FACTOR;
  final int spaceCount = indent - indentLevel * IndentHelperImpl.INDENT_FACTOR;
  return new IndentImpl(getSettings(), indentLevel, spaceCount, fileType);
}
@Override
public String fillIndent(Indent indent, FileType fileType) {
  // Converts an Indent (level + extra spaces, either possibly negative) into the
  // actual indent string, clamping the overall result at zero indentation.
  final IndentImpl indentImpl = (IndentImpl)indent;
  int indentLevel = indentImpl.getIndentLevel();
  int spaceCount = indentImpl.getSpaceCount();
  // Hoisted: the indent size is constant for the duration of this call.
  final int indentSize = getSettings().getIndentSize(fileType);
  if (indentLevel < 0) {
    // Fold the negative levels into the space count, then clamp at zero.
    spaceCount += indentLevel * indentSize;
    indentLevel = 0;
    if (spaceCount < 0) {
      spaceCount = 0;
    }
  }
  else {
    if (spaceCount < 0) {
      // Borrow whole indent levels (rounding up) to make the space count non-negative.
      int borrowedLevels = (-spaceCount + indentSize - 1) / indentSize;
      indentLevel -= borrowedLevels;
      spaceCount += borrowedLevels * indentSize;
      if (indentLevel < 0) {
        indentLevel = 0;
      }
    }
  }
  return IndentHelperImpl.fillIndent(myProject, fileType, indentLevel * IndentHelperImpl.INDENT_FACTOR + spaceCount);
}
@Override
public Indent zeroIndent() {
  // An indent of zero levels and zero spaces, not bound to any file type.
  return new IndentImpl(getSettings(), 0, 0, null);
}
private static ASTNode splitSpaceElement(TreeElement space, int offset, CharTable charTable) {
  // Splits a white space leaf into two adjacent white space leaves at 'offset'
  // (relative to the leaf start) and returns the first half.
  LOG.assertTrue(space.getElementType() == TokenType.WHITE_SPACE);
  CharSequence chars = space.getChars();
  LeafElement space1 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, 0, offset, charTable, SharedImplUtil.getManagerByTree(space));
  LeafElement space2 = Factory.createSingleLeafElement(TokenType.WHITE_SPACE, chars, offset, chars.length(), charTable, SharedImplUtil.getManagerByTree(space));
  ASTNode parent = space.getTreeParent();
  // Replace the original leaf with the first half, then append the second half after it.
  parent.replaceChild(space, space1);
  parent.addChild(space2, space1.getTreeNext());
  return space1;
}
@NotNull
private CodeStyleSettings getSettings() {
  // Always fetch current settings rather than caching: they may change between calls.
  return CodeStyleSettingsManager.getSettings(myProject);
}
@Override
public boolean isSequentialProcessingAllowed() {
  // Reads the per-thread suppression counter; presumably SEQUENTIAL_PROCESSING_ALLOWED
  // is a ThreadLocal declared elsewhere in this class — confirm before relying on it.
  return SEQUENTIAL_PROCESSING_ALLOWED.get().isAllowed();
}
/**
* Allows to define if {@link #isSequentialProcessingAllowed() sequential processing} should be allowed.
* <p/>
* Current approach is not allow to stop sequential processing for more than predefine amount of time (couple of seconds).
* That means that call to this method with <code>'true'</code> argument is not mandatory for successful processing even
* if this method is called with <code>'false'</code> argument before.
*
* @param allowed flag that defines if {@link #isSequentialProcessingAllowed() sequential processing} should be allowed
*/
public static void setSequentialProcessingAllowed(boolean allowed) {
  // 'false' pushes one suppression level, 'true' pops one; the counter lives in
  // ProcessingUnderProgressInfo and expires automatically after a few seconds.
  ProcessingUnderProgressInfo info = SEQUENTIAL_PROCESSING_ALLOWED.get();
  if (allowed) {
    info.decrement();
  }
  else {
    info.increment();
  }
}
private static class ProcessingUnderProgressInfo {

  // A suppression request stays effective for at most this long.
  private static final long DURATION_TIME = TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS);

  // Number of outstanding suppression requests and the moment they all expire.
  private int myCount;
  private long myEndTime;

  public void increment() {
    // A stale counter (its deadline already passed) is reset before counting anew.
    if (myCount > 0 && System.currentTimeMillis() > myEndTime) {
      myCount = 0;
    }
    myCount++;
    myEndTime = System.currentTimeMillis() + DURATION_TIME;
  }

  public void decrement() {
    // Never go below zero: unmatched decrements are ignored.
    if (myCount > 0) {
      myCount--;
    }
  }

  public boolean isAllowed() {
    // Allowed when nothing is suppressed or the suppression window has expired.
    return myCount <= 0 || System.currentTimeMillis() >= myEndTime;
  }
}
@Override
public void performActionWithFormatterDisabled(final Runnable r) {
  // Adapt the Runnable to the Computable-based overload; the result is ignored.
  final Computable<Object> adapter = new Computable<Object>() {
    @Override
    public Object compute() {
      r.run();
      return null;
    }
  };
  performActionWithFormatterDisabled(adapter);
}
@Override
public <T extends Throwable> void performActionWithFormatterDisabled(final ThrowableRunnable<T> r) throws T {
  // Capture any throwable raised inside the formatter-disabled section and
  // re-throw it afterwards, preserving the declared checked type T.
  final Throwable[] throwable = new Throwable[1];
  performActionWithFormatterDisabled(new Computable<Object>() {
    @Override
    public Object compute() {
      try {
        r.run();
      }
      catch (Throwable t) {
        throwable[0] = t;
      }
      return null;
    }
  });
  if (throwable[0] != null) {
    // The cast is unchecked but intended: r.run() can only throw T or unchecked throwables.
    //noinspection unchecked
    throw (T)throwable[0];
  }
}
@Override
public <T> T performActionWithFormatterDisabled(final Computable<T> r) {
  // Disable both the formatter itself and postponed formatting while computing.
  final Computable<T> withPostponedFormattingDisabled = new Computable<T>() {
    @Override
    public T compute() {
      final PostprocessReformattingAspect aspect = PostprocessReformattingAspect.getInstance(getProject());
      return aspect.disablePostprocessFormattingInside(r);
    }
  };
  return ((FormatterImpl)FormatterEx.getInstance()).runWithFormattingDisabled(withPostponedFormattingDisabled);
}
private static class RangeFormatInfo {
  // Pointer to the element at the range start; null when no element was found there.
  public final SmartPsiElementPointer startPointer;
  // Pointer to the element at the range end; null when no element was found there.
  public final SmartPsiElementPointer endPointer;
  // True when the range starts at offset zero of the file.
  public final boolean fromStart;
  // True when the range ends at the file's last offset.
  public final boolean toEnd;

  RangeFormatInfo(@Nullable SmartPsiElementPointer startPointer,
                  @Nullable SmartPsiElementPointer endPointer,
                  boolean fromStart,
                  boolean toEnd) {
    this.startPointer = startPointer;
    this.endPointer = endPointer;
    this.fromStart = fromStart;
    this.toEnd = toEnd;
  }
}
}
| reformatText — explicitly specified editor
| platform/lang-impl/src/com/intellij/psi/impl/source/codeStyle/CodeStyleManagerImpl.java | reformatText — explicitly specified editor |
|
Java | apache-2.0 | 43d52e5784f70bd093a61afab768aefacf32da2b | 0 | esaunders/autopsy,esaunders/autopsy,rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy,rcordovano/autopsy,rcordovano/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy,esaunders/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.report.modules.portablecase;
import org.sleuthkit.autopsy.report.ReportModule;
import java.util.logging.Level;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.contentviewertags.ContentViewerTagManager;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import org.sleuthkit.autopsy.report.modules.caseuco.CaseUcoFormatExporter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.CaseDbAccessManager;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
/**
* Creates a portable case from tagged files
*/
public class PortableCaseReportModule implements ReportModule {
    private static final Logger logger = Logger.getLogger(PortableCaseReportModule.class.getName());
    // Name of the subfolder holding copies of exported files inside the portable case.
    private static final String FILE_FOLDER_NAME = "PortableCaseFiles";  // NON-NLS
    // Subfolder for files whose MIME category is not in FILE_TYPE_CATEGORIES.
    private static final String UNKNOWN_FILE_TYPE_FOLDER = "Other";  // NON-NLS
    // Custom table in the portable case DB tracking the highest copied IDs.
    private static final String MAX_ID_TABLE_NAME = "portable_case_max_ids";  // NON-NLS
    // Report settings supplied by generateReport(); null until a report is started.
    private PortableCaseReportModuleSettings settings;

    // These are the types for the exported file subfolders
    private static final List<FileTypeCategory> FILE_TYPE_CATEGORIES = Arrays.asList(FileTypeCategory.AUDIO, FileTypeCategory.DOCUMENTS,
            FileTypeCategory.EXECUTABLE, FileTypeCategory.IMAGE, FileTypeCategory.VIDEO);

    // Source case being exported and the portable case under construction.
    private Case currentCase = null;
    private SleuthkitCase portableSkCase = null;
    private String caseName = "";
    private File caseFolder = null;
    private File copiedFilesFolder = null;

    // Maps old object ID from current case to new object in portable case
    private final Map<Long, Content> oldIdToNewContent = new HashMap<>();

    // Maps new object ID to the new object
    private final Map<Long, Content> newIdToContent = new HashMap<>();

    // Maps old TagName to new TagName
    private final Map<TagName, TagName> oldTagNameToNewTagName = new HashMap<>();

    // Map of old artifact type ID to new artifact type ID. There will only be changes if custom artifact types are present.
    private final Map<Integer, Integer> oldArtTypeIdToNewArtTypeId = new HashMap<>();

    // Map of old attribute type ID to new attribute type ID. There will only be changes if custom attr types are present.
    private final Map<Integer, BlackboardAttribute.Type> oldAttrTypeIdToNewAttrType = new HashMap<>();

    // Map of old artifact ID to new artifact
    private final Map<Long, BlackboardArtifact> oldArtifactIdToNewArtifact = new HashMap<>();

    // No-arg constructor required for report module instantiation; state is set up in generateReport().
    public PortableCaseReportModule() {
    }
    @NbBundle.Messages({
        "PortableCaseReportModule.getName.name=Portable Case"
    })
    @Override
    public String getName() {
        // Localized module name shown in the report generation UI.
        return Bundle.PortableCaseReportModule_getName_name();
    }
    @NbBundle.Messages({
        "PortableCaseReportModule.getDescription.description=Copies selected items to a new single-user case that can be easily shared"
    })
    @Override
    public String getDescription() {
        // Localized one-line description shown next to the module name in the UI.
        return Bundle.PortableCaseReportModule_getDescription_description();
    }
    @Override
    public String getRelativeFilePath() {
        // NOTE(review): returns the portable case display name (not a file path) and
        // caches it in the caseName field as a side effect — confirm callers expect this.
        try {
            caseName = Case.getCurrentCaseThrows().getDisplayName() + " (Portable)"; // NON-NLS
        } catch (NoCurrentCaseException ex) {
            // a case may not be open yet
            return "";
        }
        return caseName;
    }
    /**
     * Convenience method for handling cancellation.
     *
     * Logs the cancellation, marks the progress panel as canceled, and releases
     * any partially-built portable case resources via cleanup().
     *
     * @param progressPanel The report progress panel
     */
    private void handleCancellation(ReportProgressPanel progressPanel) {
        logger.log(Level.INFO, "Portable case creation canceled by user"); // NON-NLS
        progressPanel.setIndeterminate(false);
        progressPanel.complete(ReportProgressPanel.ReportStatus.CANCELED);
        cleanup();
    }
    /**
     * Convenience method to avoid code duplication.
     * Assumes that if an exception is supplied then the error is SEVERE. Otherwise
     * it is logged as a WARNING.
     *
     * Marks the progress panel as failed with the given user-facing message and
     * releases any partially-built portable case resources via cleanup().
     *
     * @param logWarning    Warning to write to the log
     * @param dialogWarning Warning to write to a pop-up window
     * @param ex            The exception (can be null)
     * @param progressPanel The report progress panel
     */
    private void handleError(String logWarning, String dialogWarning, Exception ex, ReportProgressPanel progressPanel) {
        if (ex == null) {
            logger.log(Level.WARNING, logWarning);
        } else {
            logger.log(Level.SEVERE, logWarning, ex);
        }
        progressPanel.setIndeterminate(false);
        progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, dialogWarning);
        cleanup();
    }
@NbBundle.Messages({
"PortableCaseReportModule.generateReport.verifying=Verifying selected parameters...",
"PortableCaseReportModule.generateReport.creatingCase=Creating portable case database...",
"PortableCaseReportModule.generateReport.copyingTags=Copying tags...",
"# {0} - tag name",
"PortableCaseReportModule.generateReport.copyingFiles=Copying files tagged as {0}...",
"# {0} - tag name",
"PortableCaseReportModule.generateReport.copyingArtifacts=Copying artifacts tagged as {0}...",
"# {0} - output folder",
"PortableCaseReportModule.generateReport.outputDirDoesNotExist=Output folder {0} does not exist",
"# {0} - output folder",
"PortableCaseReportModule.generateReport.outputDirIsNotDir=Output folder {0} is not a folder",
"PortableCaseReportModule.generateReport.caseClosed=Current case has been closed",
"PortableCaseReportModule.generateReport.interestingItemError=Error loading intersting items",
"PortableCaseReportModule.generateReport.errorReadingTags=Error while reading tags from case database",
"PortableCaseReportModule.generateReport.errorReadingSets=Error while reading interesting items sets from case database",
"PortableCaseReportModule.generateReport.noContentToCopy=No interesting files, results, or tagged items to copy",
"PortableCaseReportModule.generateReport.errorCopyingTags=Error copying tags",
"PortableCaseReportModule.generateReport.errorCopyingFiles=Error copying tagged files",
"PortableCaseReportModule.generateReport.errorCopyingArtifacts=Error copying tagged artifacts",
"PortableCaseReportModule.generateReport.errorCopyingInterestingFiles=Error copying interesting files",
"PortableCaseReportModule.generateReport.errorCopyingInterestingResults=Error copying interesting results",
"PortableCaseReportModule.generateReport.errorCreatingImageTagTable=Error creating image tags table",
"# {0} - attribute type name",
"PortableCaseReportModule.generateReport.errorLookingUpAttrType=Error looking up attribute type {0}",
"PortableCaseReportModule.generateReport.compressingCase=Compressing case...",
"PortableCaseReportModule.generateReport.errorCreatingReportFolder=Could not make report folder",
"PortableCaseReportModule.generateReport.errorGeneratingUCOreport=Problem while generating CASE-UCO report"
})
public void generateReport(String reportPath, PortableCaseReportModuleSettings options, ReportProgressPanel progressPanel) {
this.settings = options;
progressPanel.setIndeterminate(true);
progressPanel.start();
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_verifying());
// Clear out any old values
cleanup();
// Validate the input parameters
File outputDir = new File(reportPath);
if (! outputDir.exists()) {
handleError("Output folder " + outputDir.toString() + " does not exist",
Bundle.PortableCaseReportModule_generateReport_outputDirDoesNotExist(outputDir.toString()), null, progressPanel); // NON-NLS
return;
}
if (! outputDir.isDirectory()) {
handleError("Output folder " + outputDir.toString() + " is not a folder",
Bundle.PortableCaseReportModule_generateReport_outputDirIsNotDir(outputDir.toString()), null, progressPanel); // NON-NLS
return;
}
// Save the current case object
try {
currentCase = Case.getCurrentCaseThrows();
caseName = currentCase.getDisplayName() + " (Portable)"; // NON-NLS
} catch (NoCurrentCaseException ex) {
handleError("Current case has been closed",
Bundle.PortableCaseReportModule_generateReport_caseClosed(), null, progressPanel); // NON-NLS
return;
}
// Check that there will be something to copy
List<TagName> tagNames;
if (options.areAllTagsSelected()) {
try {
tagNames = Case.getCurrentCaseThrows().getServices().getTagsManager().getTagNamesInUse();
} catch (NoCurrentCaseException | TskCoreException ex) {
handleError("Unable to get all tags",
Bundle.PortableCaseReportModule_generateReport_errorReadingTags(), ex, progressPanel); // NON-NLS
return;
}
} else {
tagNames = options.getSelectedTagNames();
}
List<String> setNames;
if (options.areAllSetsSelected()) {
try {
setNames = getAllInterestingItemsSets();
} catch (NoCurrentCaseException | TskCoreException ex) {
handleError("Unable to get all interesting items sets",
Bundle.PortableCaseReportModule_generateReport_errorReadingSets(), ex, progressPanel); // NON-NLS
return;
}
} else {
setNames = options.getSelectedSetNames();
}
if (tagNames.isEmpty() && setNames.isEmpty()) {
handleError("No content to copy",
Bundle.PortableCaseReportModule_generateReport_noContentToCopy(), null, progressPanel); // NON-NLS
return;
}
// Create the case.
// portableSkCase and caseFolder will be set here.
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_creatingCase());
createCase(outputDir, progressPanel);
if (portableSkCase == null) {
// The error has already been handled
return;
}
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
// Set up the table for the image tags
try {
initializeImageTags(progressPanel);
} catch (TskCoreException ex) {
handleError("Error creating image tag table", Bundle.PortableCaseReportModule_generateReport_errorCreatingImageTagTable(), ex, progressPanel); // NON-NLS
return;
}
// Copy the selected tags
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_copyingTags());
try {
for(TagName tagName:tagNames) {
TagName newTagName = portableSkCase.addOrUpdateTagName(tagName.getDisplayName(), tagName.getDescription(), tagName.getColor(), tagName.getKnownStatus());
oldTagNameToNewTagName.put(tagName, newTagName);
}
} catch (TskCoreException ex) {
handleError("Error copying tags", Bundle.PortableCaseReportModule_generateReport_errorCopyingTags(), ex, progressPanel); // NON-NLS
return;
}
// Set up tracking to support any custom artifact or attribute types
for (BlackboardArtifact.ARTIFACT_TYPE type:BlackboardArtifact.ARTIFACT_TYPE.values()) {
oldArtTypeIdToNewArtTypeId.put(type.getTypeID(), type.getTypeID());
}
for (BlackboardAttribute.ATTRIBUTE_TYPE type:BlackboardAttribute.ATTRIBUTE_TYPE.values()) {
try {
oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getAttributeType(type.getLabel()));
} catch (TskCoreException ex) {
handleError("Error looking up attribute name " + type.getLabel(),
Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()),
ex, progressPanel); // NON-NLS
}
}
// Copy the tagged files
try {
for(TagName tagName:tagNames) {
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_copyingFiles(tagName.getDisplayName()));
addFilesToPortableCase(tagName, progressPanel);
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
}
} catch (TskCoreException ex) {
handleError("Error copying tagged files", Bundle.PortableCaseReportModule_generateReport_errorCopyingFiles(), ex, progressPanel); // NON-NLS
return;
}
// Copy the tagged artifacts and associated files
try {
for(TagName tagName:tagNames) {
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_copyingArtifacts(tagName.getDisplayName()));
addArtifactsToPortableCase(tagName, progressPanel);
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
}
} catch (TskCoreException ex) {
handleError("Error copying tagged artifacts", Bundle.PortableCaseReportModule_generateReport_errorCopyingArtifacts(), ex, progressPanel); // NON-NLS
return;
}
// Copy interesting files and results
if (! setNames.isEmpty()) {
try {
List<BlackboardArtifact> interestingFiles = currentCase.getSleuthkitCase().getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
for (BlackboardArtifact art:interestingFiles) {
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
BlackboardAttribute setAttr = art.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
if (setNames.contains(setAttr.getValueString())) {
copyContentToPortableCase(art, progressPanel);
}
}
} catch (TskCoreException ex) {
handleError("Error copying interesting files", Bundle.PortableCaseReportModule_generateReport_errorCopyingInterestingFiles(), ex, progressPanel); // NON-NLS
return;
}
try {
List<BlackboardArtifact> interestingResults = currentCase.getSleuthkitCase().getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
for (BlackboardArtifact art:interestingResults) {
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
BlackboardAttribute setAttr = art.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
if (setNames.contains(setAttr.getValueString())) {
copyContentToPortableCase(art, progressPanel);
}
}
} catch (TskCoreException ex) {
handleError("Error copying interesting results", Bundle.PortableCaseReportModule_generateReport_errorCopyingInterestingResults(), ex, progressPanel); // NON-NLS
return;
}
}
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
File reportsFolder = Paths.get(caseFolder.toString(), "Reports").toFile();
if(!reportsFolder.mkdir()) {
handleError("Could not make report folder", Bundle.PortableCaseReportModule_generateReport_errorCreatingReportFolder(), null, progressPanel); // NON-NLS
return;
}
try {
CaseUcoFormatExporter.export(tagNames, setNames, reportsFolder, progressPanel);
} catch (IOException | SQLException | NoCurrentCaseException | TskCoreException ex) {
handleError("Problem while generating CASE-UCO report",
Bundle.PortableCaseReportModule_generateReport_errorGeneratingUCOreport(), ex, progressPanel); // NON-NLS
}
// Compress the case (if desired)
if (options.shouldCompress()) {
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_compressingCase());
boolean success = compressCase(progressPanel);
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
handleCancellation(progressPanel);
return;
}
if (! success) {
// Errors have been handled already
return;
}
}
// Close the case connections and clear out the maps
cleanup();
progressPanel.complete(ReportProgressPanel.ReportStatus.COMPLETE);
}
/**
 * Collect the names of all interesting item sets in use in the current case.
 *
 * Queries the blackboard attributes table for every TSK_SET_NAME attribute
 * attached to an interesting file hit or interesting artifact hit.
 *
 * @return the distinct interesting item set names
 *
 * @throws NoCurrentCaseException if no case is currently open
 * @throws TskCoreException on case database error
 */
private List<String> getAllInterestingItemsSets() throws NoCurrentCaseException, TskCoreException {

    // Inner query: every TSK_SET_NAME value attached to an interesting item artifact.
    String setNameSelect = "SELECT (value_text) AS set_name FROM blackboard_attributes WHERE (artifact_type_id = '"
            + BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID() + "' OR artifact_type_id = '"
            + BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + "') AND attribute_type_id = '"
            + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + "'"; // NON-NLS

    // Outer query: count the occurrences of each distinct set name.
    String countQuery = "set_name, count(1) AS set_count FROM (" + setNameSelect + ") set_names GROUP BY set_name"; // NON-NLS

    GetInterestingItemSetNamesCallback namesCallback = new GetInterestingItemSetNamesCallback();
    Case.getCurrentCaseThrows().getSleuthkitCase().getCaseDbAccessManager().select(countQuery, namesCallback);

    // Only the names are needed here; the counts are discarded.
    return new ArrayList<>(namesCallback.getSetCountMap().keySet());
}
/**
 * Create the case directory and case database.
 * portableSkCase will be set if this completes without error; on any
 * failure the error is reported through handleError() and portableSkCase
 * is left null, which is how generateReport() detects the failure.
 *
 * @param outputDir     The parent for the case folder
 * @param progressPanel The progress panel, used for error reporting
 */
@NbBundle.Messages({
    "# {0} - case folder",
    "PortableCaseReportModule.createCase.caseDirExists=Case folder {0} already exists",
    "PortableCaseReportModule.createCase.errorCreatingCase=Error creating case",
    "# {0} - folder",
    "PortableCaseReportModule.createCase.errorCreatingFolder=Error creating folder {0}",
    "PortableCaseReportModule.createCase.errorStoringMaxIds=Error storing maximum database IDs",
})
private void createCase(File outputDir, ReportProgressPanel progressPanel) {

    // Create the case folder; refuse to overwrite an existing one.
    caseFolder = Paths.get(outputDir.toString(), caseName).toFile();

    if (caseFolder.exists()) {
        handleError("Case folder " + caseFolder.toString() + " already exists",
                Bundle.PortableCaseReportModule_createCase_caseDirExists(caseFolder.toString()), null, progressPanel); // NON-NLS
        return;
    }

    // Create the portable case database inside the new folder.
    try {
        portableSkCase = currentCase.createPortableCase(caseName, caseFolder);
    } catch (TskCoreException ex) {
        handleError("Error creating case " + caseName + " in folder " + caseFolder.toString(),
                Bundle.PortableCaseReportModule_createCase_errorCreatingCase(), ex, progressPanel); // NON-NLS
        return;
    }

    // Record the source case's highest database IDs in the portable case
    // (see saveHighestIds()).
    try {
        saveHighestIds();
    } catch (TskCoreException ex) {
        handleError("Error storing maximum database IDs",
                Bundle.PortableCaseReportModule_createCase_errorStoringMaxIds(), ex, progressPanel); // NON-NLS
        return;
    }

    // Create the base folder that copied file content will be written under.
    copiedFilesFolder = Paths.get(caseFolder.toString(), FILE_FOLDER_NAME).toFile();
    if (! copiedFilesFolder.mkdir()) {
        handleError("Error creating folder " + copiedFilesFolder.toString(),
                Bundle.PortableCaseReportModule_createCase_errorCreatingFolder(copiedFilesFolder.toString()), null, progressPanel); // NON-NLS
        return;
    }

    // Create one subfolder per known file-type category...
    for (FileTypeCategory cat:FILE_TYPE_CATEGORIES) {
        File subFolder = Paths.get(copiedFilesFolder.toString(), cat.getDisplayName()).toFile();
        if (! subFolder.mkdir()) {
            handleError("Error creating folder " + subFolder.toString(),
                    Bundle.PortableCaseReportModule_createCase_errorCreatingFolder(subFolder.toString()), null, progressPanel); // NON-NLS
            return;
        }
    }

    // ...plus one for files whose MIME type is unknown (see getExportSubfolder()).
    File unknownTypeFolder = Paths.get(copiedFilesFolder.toString(), UNKNOWN_FILE_TYPE_FOLDER).toFile();
    if (! unknownTypeFolder.mkdir()) {
        handleError("Error creating folder " + unknownTypeFolder.toString(),
                Bundle.PortableCaseReportModule_createCase_errorCreatingFolder(unknownTypeFolder.toString()), null, progressPanel); // NON-NLS
        return;
    }
}
/**
 * Save the current highest IDs to the portable case.
 *
 * Creates a small bookkeeping table in the portable case and stores one
 * (table name, max ID) row for each tracked source-case table.
 *
 * @throws TskCoreException
 */
private void saveHighestIds() throws TskCoreException {

    CaseDbAccessManager sourceDbManager = currentCase.getSleuthkitCase().getCaseDbAccessManager();

    // One row per tracked table: the table name and its current maximum ID.
    String tableSchema = "( table_name TEXT PRIMARY KEY, "
            + " max_id TEXT)"; // NON-NLS
    portableSkCase.getCaseDbAccessManager().createTable(MAX_ID_TABLE_NAME, tableSchema);

    // Each tracked table paired with the name of its ID column.
    String[][] tablesAndIdColumns = {
        {"tsk_objects", "obj_id"}, // NON-NLS
        {"content_tags", "tag_id"}, // NON-NLS
        {"blackboard_artifact_tags", "tag_id"}, // NON-NLS
        {"tsk_examiners", "examiner_id"} // NON-NLS
    };

    // Query the max ID of each table and store it via StoreMaxIdCallback.
    for (String[] tableAndId : tablesAndIdColumns) {
        sourceDbManager.select("max(" + tableAndId[1] + ") as max_id from " + tableAndId[0],
                new StoreMaxIdCallback(tableAndId[0])); // NON-NLS
    }
}
/**
 * Set up the image tag table in the portable case.
 *
 * Creates the content viewer tag table if it is not already present;
 * otherwise does nothing.
 *
 * @param progressPanel
 *
 * @throws TskCoreException
 */
private void initializeImageTags(ReportProgressPanel progressPanel) throws TskCoreException {

    CaseDbAccessManager portableDbManager = portableSkCase.getCaseDbAccessManager();

    // Nothing to do if a previous run already created the table.
    boolean tagTableExists = portableDbManager.tableExists(ContentViewerTagManager.TABLE_NAME);
    if (tagTableExists) {
        return;
    }

    portableDbManager.createTable(ContentViewerTagManager.TABLE_NAME, ContentViewerTagManager.TABLE_SCHEMA_SQLITE);
}
/**
 * Add all files with a given tag to the portable case.
 *
 * Non-file content carrying the tag is skipped. Each copied file is
 * re-tagged in the portable case, and any image tag region data stored
 * for the tag is copied as well.
 *
 * @param oldTagName    The TagName object from the current case
 * @param progressPanel The progress panel
 *
 * @throws TskCoreException
 */
private void addFilesToPortableCase(TagName oldTagName, ReportProgressPanel progressPanel) throws TskCoreException {

    // All content tags carrying this tag name in the current case.
    List<ContentTag> contentTags = currentCase.getServices().getTagsManager().getContentTagsByTagName(oldTagName);

    for (ContentTag contentTag : contentTags) {

        // Stop early if the user canceled the report.
        if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
            return;
        }

        // Only files are copied; other tagged content types are ignored.
        Content taggedContent = contentTag.getContent();
        if (!(taggedContent instanceof AbstractFile)) {
            continue;
        }

        long newFileId = copyContentToPortableCase(taggedContent, progressPanel);

        // The tag name must already have been copied into the portable case.
        if (!oldTagNameToNewTagName.containsKey(contentTag.getName())) {
            throw new TskCoreException("TagName map is missing entry for ID " + contentTag.getName().getId() + " with display name " + contentTag.getName().getDisplayName()); // NON-NLS
        }
        ContentTag newContentTag = portableSkCase.addContentTag(newIdToContent.get(newFileId), oldTagNameToNewTagName.get(contentTag.getName()), contentTag.getComment(), contentTag.getBeginByteOffset(), contentTag.getEndByteOffset());

        // Copy any image tag region data (empty string means there is none).
        String appData = getImageTagDataForContentTag(contentTag);
        if (!appData.isEmpty()) {
            addImageTagToPortableCase(newContentTag, appData);
        }
    }
}
/**
 * Gets the image tag data for a given content tag.
 *
 * @param tag The ContentTag in the current case
 *
 * @return The app_data string for this content tag or an empty string if there was none
 *
 * @throws TskCoreException
 */
private String getImageTagDataForContentTag(ContentTag tag) throws TskCoreException {

    // Look up the row in the content viewer tag table for this tag ID.
    String appDataQuery = "* FROM " + ContentViewerTagManager.TABLE_NAME + " WHERE content_tag_id = " + tag.getId();
    GetImageTagCallback appDataCallback = new GetImageTagCallback();
    currentCase.getSleuthkitCase().getCaseDbAccessManager().select(appDataQuery, appDataCallback);
    return appDataCallback.getAppData();
}
/**
 * CaseDbAccessManager callback to get the app_data string for the image tag.
 *
 * Stores the last app_data value seen in the result set; an empty string
 * means no image tag data was found.
 */
private static class GetImageTagCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback {

    private static final Logger logger = Logger.getLogger(PortableCaseReportModule.class.getName());

    // Last app_data value read from the result set ("" if none).
    private String appData = "";

    @Override
    public void process(ResultSet rs) {
        try {
            while (rs.next()) {
                try {
                    // If multiple rows match, the last one wins.
                    appData = rs.getString("app_data"); // NON-NLS
                } catch (SQLException ex) {
                    logger.log(Level.WARNING, "Unable to get app_data from result set", ex); // NON-NLS
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "Failed to get next result for app_data", ex); // NON-NLS
        }
    }

    /**
     * Get the app_data string.
     *
     * @return the app_data string, or an empty string if no row was found
     */
    String getAppData() {
        return appData;
    }
}
/**
 * Add an image tag to the portable case.
 *
 * @param newContentTag The content tag in the portable case
 * @param appData       The string to copy into app_data
 *
 * @throws TskCoreException
 */
private void addImageTagToPortableCase(ContentTag newContentTag, String appData) throws TskCoreException {
    // Double any embedded single quotes so the SQL string literal stays
    // well-formed; previously a quote in appData would break the INSERT.
    String escapedAppData = appData.replace("'", "''");
    String insert = "(content_tag_id, app_data) VALUES (" + newContentTag.getId() + ", '" + escapedAppData + "')";
    portableSkCase.getCaseDbAccessManager().insert(ContentViewerTagManager.TABLE_NAME, insert);
}
/**
 * Add all artifacts with a given tag to the portable case.
 *
 * For each tagged artifact: copies the artifact's source content, then
 * the artifact itself, then re-applies the tag in the portable case.
 *
 * @param oldTagName    The TagName object from the current case
 * @param progressPanel The progress panel
 *
 * @throws TskCoreException
 */
private void addArtifactsToPortableCase(TagName oldTagName, ReportProgressPanel progressPanel) throws TskCoreException {

    List<BlackboardArtifactTag> artifactTags = currentCase.getServices().getTagsManager().getBlackboardArtifactTagsByTagName(oldTagName);

    for (BlackboardArtifactTag artifactTag : artifactTags) {

        // Stop early if the user canceled the report.
        if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
            return;
        }

        // Copy the source content first so the artifact has a parent to attach to.
        long newContentId = copyContentToPortableCase(artifactTag.getContent(), progressPanel);

        // Copy the artifact itself (and, recursively, any associated artifacts).
        BlackboardArtifact copiedArtifact = copyArtifact(newContentId, artifactTag.getArtifact());

        // The tag name must already have been copied into the portable case.
        TagName sourceTagName = artifactTag.getName();
        if (!oldTagNameToNewTagName.containsKey(sourceTagName)) {
            throw new TskCoreException("TagName map is missing entry for ID " + sourceTagName.getId() + " with display name " + sourceTagName.getDisplayName()); // NON-NLS
        }

        // Tag the artifact in the portable case.
        portableSkCase.addBlackboardArtifactTag(copiedArtifact, oldTagNameToNewTagName.get(sourceTagName), artifactTag.getComment());
    }
}
/**
 * Copy an artifact into the new case. Will also copy any associated artifacts.
 *
 * Recursive: a TSK_ASSOCIATED_ARTIFACT attribute causes the referenced
 * artifact to be copied first so the rebuilt attribute can point at the
 * new artifact's ID. Completed copies are cached in
 * oldArtifactIdToNewArtifact so each source artifact is copied at most once.
 *
 * @param newContentId   The content ID (in the portable case) of the source content
 * @param artifactToCopy The artifact to copy
 *
 * @return The new artifact in the portable case
 *
 * @throws TskCoreException
 */
private BlackboardArtifact copyArtifact(long newContentId, BlackboardArtifact artifactToCopy) throws TskCoreException {

    // Return the cached copy if this artifact was already handled
    // (also terminates the recursion on association cycles seen before).
    if (oldArtifactIdToNewArtifact.containsKey(artifactToCopy.getArtifactID())) {
        return oldArtifactIdToNewArtifact.get(artifactToCopy.getArtifactID());
    }

    // First create the associated artifact (if present)
    BlackboardAttribute oldAssociatedAttribute = artifactToCopy.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT));
    List<BlackboardAttribute> newAttrs = new ArrayList<>();
    if (oldAssociatedAttribute != null) {
        BlackboardArtifact oldAssociatedArtifact = currentCase.getSleuthkitCase().getBlackboardArtifact(oldAssociatedAttribute.getValueLong());
        BlackboardArtifact newAssociatedArtifact = copyArtifact(newContentId, oldAssociatedArtifact);
        // Rebuild the association attribute against the new artifact's ID.
        newAttrs.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
                String.join(",", oldAssociatedAttribute.getSources()), newAssociatedArtifact.getArtifactID()));
    }

    // Create the new artifact
    int newArtifactTypeId = getNewArtifactTypeId(artifactToCopy);
    BlackboardArtifact newArtifact = portableSkCase.newBlackboardArtifact(newArtifactTypeId, newContentId);
    List<BlackboardAttribute> oldAttrs = artifactToCopy.getAttributes();

    // Copy over each attribute, making sure the type is in the new case.
    for (BlackboardAttribute oldAttr:oldAttrs) {

        // The associated artifact has already been handled
        if (oldAttr.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT.getTypeID()) {
            continue;
        }

        BlackboardAttribute.Type newAttributeType = getNewAttributeType(oldAttr);
        // Each value type uses a different accessor/constructor pair, so
        // dispatch on the attribute's declared value type.
        switch (oldAttr.getValueType()) {
            case BYTE:
                newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
                        oldAttr.getValueBytes()));
                break;
            case DOUBLE:
                newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
                        oldAttr.getValueDouble()));
                break;
            case INTEGER:
                newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
                        oldAttr.getValueInt()));
                break;
            case DATETIME:
            case LONG:
                newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
                        oldAttr.getValueLong()));
                break;
            case STRING:
            case JSON:
                newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
                        oldAttr.getValueString()));
                break;
            default:
                throw new TskCoreException("Unexpected attribute value type found: " + oldAttr.getValueType().getLabel()); // NON-NLS
        }
    }

    newArtifact.addAttributes(newAttrs);

    // Cache the copy before returning so later references reuse it.
    oldArtifactIdToNewArtifact.put(artifactToCopy.getArtifactID(), newArtifact);
    return newArtifact;
}
/**
 * Get the artifact type ID in the portable case and create new artifact type if needed.
 * For built-in artifacts this will be the same as the original.
 *
 * @param oldArtifact The artifact in the current case
 *
 * @return The corresponding artifact type ID in the portable case
 */
private int getNewArtifactTypeId(BlackboardArtifact oldArtifact) throws TskCoreException {

    // Built-in types and previously-created custom types are cached.
    int sourceTypeId = oldArtifact.getArtifactTypeID();
    if (oldArtTypeIdToNewArtTypeId.containsKey(sourceTypeId)) {
        return oldArtTypeIdToNewArtTypeId.get(sourceTypeId);
    }

    // Unseen custom type: create it in the portable case and cache the mapping.
    BlackboardArtifact.Type customType = currentCase.getSleuthkitCase().getArtifactType(oldArtifact.getArtifactTypeName());
    try {
        BlackboardArtifact.Type createdType = portableSkCase.addBlackboardArtifactType(customType.getTypeName(), customType.getDisplayName());
        oldArtTypeIdToNewArtTypeId.put(sourceTypeId, createdType.getTypeID());
        return createdType.getTypeID();
    } catch (TskDataException ex) {
        throw new TskCoreException("Error creating new artifact type " + customType.getTypeName(), ex); // NON-NLS
    }
}
/**
 * Get the attribute type ID in the portable case and create new attribute type if needed.
 * For built-in attributes this will be the same as the original.
 *
 * @param oldAttribute The attribute in the current case
 *
 * @return The corresponding attribute type in the portable case
 */
private BlackboardAttribute.Type getNewAttributeType(BlackboardAttribute oldAttribute) throws TskCoreException {

    BlackboardAttribute.Type sourceType = oldAttribute.getAttributeType();

    // Built-in types and previously-created custom types are cached.
    if (oldAttrTypeIdToNewAttrType.containsKey(sourceType.getTypeID())) {
        return oldAttrTypeIdToNewAttrType.get(sourceType.getTypeID());
    }

    // Unseen custom type: create it in the portable case and cache the mapping.
    try {
        BlackboardAttribute.Type createdType = portableSkCase.addArtifactAttributeType(sourceType.getTypeName(),
                sourceType.getValueType(), sourceType.getDisplayName());
        oldAttrTypeIdToNewAttrType.put(sourceType.getTypeID(), createdType);
        return createdType;
    } catch (TskDataException ex) {
        throw new TskCoreException("Error creating new attribute type " + sourceType.getTypeName(), ex); // NON-NLS
    }
}
/**
 * Top level method to copy a content object to the portable case.
 *
 * Updates the progress panel with the path being copied, then delegates
 * to copyContent(), which handles caching and parent creation.
 *
 * @param content       The content object to copy
 * @param progressPanel The progress panel
 *
 * @return The object ID of the copied content in the portable case
 *
 * @throws TskCoreException
 */
@NbBundle.Messages({
    "# {0} - File name",
    "PortableCaseReportModule.copyContentToPortableCase.copyingFile=Copying file {0}",
})
private long copyContentToPortableCase(Content content, ReportProgressPanel progressPanel) throws TskCoreException {
    String sourcePath = content.getUniquePath();
    progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_copyContentToPortableCase_copyingFile(sourcePath));
    return copyContent(content);
}
/**
 * Returns the object ID for the given content object in the portable case.
 *
 * Recursively copies the parent chain first so every object has a valid
 * parent in the portable case, then copies this object. Results are
 * cached in oldIdToNewContent, so each source object is copied at most
 * once. File content is written under copiedFilesFolder, sorted into
 * subfolders by MIME type category (see getExportSubfolder()).
 *
 * @param content The content object to copy into the portable case
 *
 * @return the new object ID for this content
 *
 * @throws TskCoreException
 */
private long copyContent(Content content) throws TskCoreException {

    // Check if we've already copied this content
    if (oldIdToNewContent.containsKey(content.getId())) {
        return oldIdToNewContent.get(content.getId()).getId();
    }

    // Otherwise:
    // - Make parent of this object (if applicable)
    // - Copy this content
    long parentId = 0;
    if (content.getParent() != null) {
        parentId = copyContent(content.getParent());
    }

    Content newContent;
    if (content instanceof BlackboardArtifact) {
        // Artifacts are copied outside a transaction via copyArtifact().
        BlackboardArtifact artifactToCopy = (BlackboardArtifact)content;
        newContent = copyArtifact(parentId, artifactToCopy);
    } else {
        // All other content types are copied inside a single transaction
        // so a failure rolls back cleanly.
        CaseDbTransaction trans = portableSkCase.beginTransaction();
        try {
            if (content instanceof Image) {
                Image image = (Image)content;
                newContent = portableSkCase.addImage(image.getType(), image.getSsize(), image.getSize(), image.getName(),
                        new ArrayList<>(), image.getTimeZone(), image.getMd5(), image.getSha1(), image.getSha256(), image.getDeviceId(), trans);
            } else if (content instanceof VolumeSystem) {
                VolumeSystem vs = (VolumeSystem)content;
                newContent = portableSkCase.addVolumeSystem(parentId, vs.getType(), vs.getOffset(), vs.getBlockSize(), trans);
            } else if (content instanceof Volume) {
                Volume vs = (Volume)content;
                newContent = portableSkCase.addVolume(parentId, vs.getAddr(), vs.getStart(), vs.getLength(),
                        vs.getDescription(), vs.getFlags(), trans);
            } else if (content instanceof Pool) {
                Pool pool = (Pool)content;
                newContent = portableSkCase.addPool(parentId, pool.getType(), trans);
            } else if (content instanceof FileSystem) {
                FileSystem fs = (FileSystem)content;
                newContent = portableSkCase.addFileSystem(parentId, fs.getImageOffset(), fs.getFsType(), fs.getBlock_size(),
                        fs.getBlock_count(), fs.getRoot_inum(), fs.getFirst_inum(), fs.getLastInum(),
                        fs.getName(), trans);
            } else if (content instanceof AbstractFile) {
                // NOTE: a BlackboardArtifact check previously appeared here,
                // but it was unreachable - artifacts are always handled
                // before the transaction is opened - so it has been removed.
                AbstractFile abstractFile = (AbstractFile)content;

                if (abstractFile instanceof LocalFilesDataSource) {
                    LocalFilesDataSource localFilesDS = (LocalFilesDataSource)abstractFile;
                    newContent = portableSkCase.addLocalFilesDataSource(localFilesDS.getDeviceId(), localFilesDS.getName(), localFilesDS.getTimeZone(), trans);
                } else {
                    if (abstractFile.isDir()) {
                        newContent = portableSkCase.addLocalDirectory(parentId, abstractFile.getName(), trans);
                    } else {
                        try {
                            // Copy the file bytes into the portable case's file folder.
                            String fileName = abstractFile.getId() + "-" + FileUtil.escapeFileName(abstractFile.getName());
                            String exportSubFolder = getExportSubfolder(abstractFile);
                            File exportFolder = Paths.get(copiedFilesFolder.toString(), exportSubFolder).toFile();
                            File localFile = new File(exportFolder, fileName);
                            ContentUtils.writeToFile(abstractFile, localFile);

                            // Get the new parent object in the portable case database
                            Content oldParent = abstractFile.getParent();
                            if (! oldIdToNewContent.containsKey(oldParent.getId())) {
                                throw new TskCoreException("Parent of file with ID " + abstractFile.getId() + " has not been created"); // NON-NLS
                            }
                            Content newParent = oldIdToNewContent.get(oldParent.getId());

                            // Construct the relative path to the copied file
                            String relativePath = FILE_FOLDER_NAME + File.separator + exportSubFolder + File.separator + fileName;

                            newContent = portableSkCase.addLocalFile(abstractFile.getName(), relativePath, abstractFile.getSize(),
                                    abstractFile.getCtime(), abstractFile.getCrtime(), abstractFile.getAtime(), abstractFile.getMtime(),
                                    abstractFile.getMd5Hash(), abstractFile.getKnown(), abstractFile.getMIMEType(),
                                    true, TskData.EncodingType.NONE,
                                    newParent, trans);
                        } catch (IOException ex) {
                            throw new TskCoreException("Error copying file " + abstractFile.getName() + " with original obj ID "
                                    + abstractFile.getId(), ex); // NON-NLS
                        }
                    }
                }
            } else {
                throw new TskCoreException("Trying to copy unexpected Content type " + content.getClass().getName()); // NON-NLS
            }
            trans.commit();
        }  catch (TskCoreException ex) {
            trans.rollback();
            throw(ex);
        }
    }

    // Save the new object so later lookups (and child copies) can find it.
    oldIdToNewContent.put(content.getId(), newContent);
    newIdToContent.put(newContent.getId(), newContent);
    return oldIdToNewContent.get(content.getId()).getId();
}
/**
 * Return the subfolder name for this file based on MIME type.
 *
 * @param abstractFile the file
 *
 * @return the name of the appropriate subfolder for this file type
 */
private String getExportSubfolder(AbstractFile abstractFile) {

    String mimeType = abstractFile.getMIMEType();

    // Files with no recorded MIME type go in the catch-all folder.
    if (mimeType == null || mimeType.isEmpty()) {
        return UNKNOWN_FILE_TYPE_FOLDER;
    }

    // First category claiming this MIME type wins.
    for (FileTypeCategory category : FILE_TYPE_CATEGORIES) {
        if (category.getMediaTypes().contains(mimeType)) {
            return category.getDisplayName();
        }
    }

    // Recognized by no category - treat as unknown.
    return UNKNOWN_FILE_TYPE_FOLDER;
}
/**
 * Clear out the maps and other fields and close the database connections.
 *
 * After this call the module holds no references to either case; the
 * next report run starts from a clean state.
 */
private void cleanup() {
    // Drop all source-to-portable lookup maps built during the report.
    oldIdToNewContent.clear();
    newIdToContent.clear();
    oldTagNameToNewTagName.clear();
    oldArtTypeIdToNewArtTypeId.clear();
    oldAttrTypeIdToNewAttrType.clear();
    oldArtifactIdToNewArtifact.clear();

    // Close the portable case database, then release all case/folder references.
    closePortableCaseDatabase();
    currentCase = null;
    caseFolder = null;
    copiedFilesFolder = null;
}
/**
 * Close the portable case database, if one is open.
 *
 * Safe to call repeatedly; does nothing when no portable case is open.
 */
private void closePortableCaseDatabase() {
    if (portableSkCase == null) {
        return;
    }
    portableSkCase.close();
    portableSkCase = null;
}
/*@Override
public JPanel getConfigurationPanel() {
configPanel = new CreatePortableCasePanel();
return configPanel;
} */
/**
 * CaseDbAccessManager callback that stores the "max ID" query result for
 * a single source-case table into the portable case's max-ID table.
 */
private class StoreMaxIdCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback {

    // Name of the source-case table whose maximum ID is being recorded.
    private final String tableName;

    StoreMaxIdCallback(String tableName) {
        this.tableName = tableName;
    }

    @Override
    public void process(ResultSet rs) {
        try {
            while (rs.next()) {
                try {
                    Long maxId = rs.getLong("max_id"); // NON-NLS
                    // Insert one (table_name, max_id) row into the portable case.
                    String query = " (table_name, max_id) VALUES ('" + tableName + "', '" + maxId + "')"; // NON-NLS
                    portableSkCase.getCaseDbAccessManager().insert(MAX_ID_TABLE_NAME, query);
                } catch (SQLException ex) {
                    logger.log(Level.WARNING, "Unable to get maximum ID from result set", ex); // NON-NLS
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Unable to save maximum ID from result set", ex); // NON-NLS
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "Failed to get maximum ID from result set", ex); // NON-NLS
        }
    }
}
/**
 * Compress the completed portable case folder using 7-Zip, optionally
 * splitting the archive into fixed-size chunks.
 *
 * On success the case folder's contents are replaced with the compressed
 * file(s). All errors are reported through handleError().
 *
 * @param progressPanel The progress panel (polled for cancellation)
 *
 * @return true if compression succeeded, false otherwise
 */
@NbBundle.Messages({
    "PortableCaseReportModule.compressCase.errorFinding7zip=Could not locate 7-Zip executable",
    "# {0} - Temp folder path",
    "PortableCaseReportModule.compressCase.errorCreatingTempFolder=Could not create temporary folder {0}",
    "PortableCaseReportModule.compressCase.errorCompressingCase=Error compressing case",
    "PortableCaseReportModule.compressCase.canceled=Compression canceled by user",
})
private boolean compressCase(ReportProgressPanel progressPanel) {

    // Close the portable case database (we still need some of the variables that would be cleared by cleanup())
    closePortableCaseDatabase();

    // Make a temporary folder for the compressed case
    File tempZipFolder = Paths.get(currentCase.getTempDirectory(), "portableCase" + System.currentTimeMillis()).toFile(); // NON-NLS
    if (! tempZipFolder.mkdir()) {
        handleError("Error creating temporary folder " + tempZipFolder.toString(),
                Bundle.PortableCaseReportModule_compressCase_errorCreatingTempFolder(tempZipFolder.toString()), null, progressPanel); // NON-NLS
        return false;
    }

    // Find 7-Zip
    File sevenZipExe = locate7ZipExecutable();
    if (sevenZipExe == null) {
        handleError("Error finding 7-Zip executable", Bundle.PortableCaseReportModule_compressCase_errorFinding7zip(), null, progressPanel); // NON-NLS
        return false;
    }

    File zipFile = Paths.get(tempZipFolder.getAbsolutePath(), caseName + ".zip").toFile(); // NON-NLS

    // Build the 7-Zip command line. The volume-size option is only added
    // when chunking was requested - passing an empty argument would be
    // misinterpreted by 7-Zip.
    List<String> command = new ArrayList<>();
    command.add(sevenZipExe.getAbsolutePath());
    command.add("a"); // Add to archive
    command.add(zipFile.getAbsolutePath());
    command.add(caseFolder.getAbsolutePath());
    if (settings.getChunkSize() != PortableCaseReportModuleSettings.ChunkSize.NONE) {
        command.add("-v" + settings.getChunkSize().getSevenZipParam());
    }

    ProcessBuilder procBuilder = new ProcessBuilder();
    procBuilder.command(command);

    try {
        Process process = procBuilder.start();

        // Poll instead of blocking so user cancellation can be honored.
        while (process.isAlive()) {
            if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                process.destroy();
                return false;
            }
            Thread.sleep(200);
        }
        int exitCode = process.exitValue();
        if (exitCode != 0) {
            // Save any errors so they can be logged
            StringBuilder sb = new StringBuilder();
            try (BufferedReader br = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
                String line;
                while ((line = br.readLine()) != null) {
                    sb.append(line).append(System.getProperty("line.separator")); // NON-NLS
                }
            }

            handleError("Error compressing case\n7-Zip output: " + sb.toString(), Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), null, progressPanel); // NON-NLS
            return false;
        }
    } catch (IOException ex) {
        handleError("Error compressing case", Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), ex, progressPanel); // NON-NLS
        return false;
    } catch (InterruptedException ex) {
        // Restore the interrupt flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
        handleError("Error compressing case", Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), ex, progressPanel); // NON-NLS
        return false;
    }

    // Delete everything in the case folder then copy over the compressed file(s)
    try {
        FileUtils.cleanDirectory(caseFolder);
        FileUtils.copyDirectory(tempZipFolder, caseFolder);
        FileUtils.deleteDirectory(tempZipFolder);
    } catch (IOException ex) {
        handleError("Error compressing case", Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), ex, progressPanel); // NON-NLS
        return false;
    }

    return true;
}
/**
 * Locate the 7-Zip executable from the release folder.
 *
 * Windows-only: returns null on other platforms, when the executable is
 * not found, or when it is not executable.
 *
 * @return 7-Zip executable, or null if unavailable
 */
private static File locate7ZipExecutable() {
    // 7-Zip is only bundled for Windows installations.
    if (!PlatformUtil.isWindowsOS()) {
        return null;
    }

    String relativeExePath = Paths.get("7-Zip", "7z.exe").toString(); // NON-NLS
    File exeFile = InstalledFileLocator.getDefault().locate(relativeExePath, PortableCaseReportModule.class.getPackage().getName(), false);

    // Must both exist and be runnable.
    if (exeFile == null || !exeFile.canExecute()) {
        return null;
    }
    return exeFile;
}
/**
 * Processes the result sets from the interesting item set name query.
 *
 * Expects rows with columns set_name and set_count, and accumulates them
 * into a name-to-count map.
 */
public static class GetInterestingItemSetNamesCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback {

    private static final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(GetInterestingItemSetNamesCallback.class.getName());

    // Map from each interesting item set name to the number of items in it.
    private final Map<String, Long> setCounts = new HashMap<>();

    @Override
    public void process(ResultSet rs) {
        try {
            while (rs.next()) {
                try {
                    Long setCount = rs.getLong("set_count"); // NON-NLS
                    String setName = rs.getString("set_name"); // NON-NLS
                    setCounts.put(setName, setCount);
                } catch (SQLException ex) {
                    // Message fixed: it previously referred to columns from a different query.
                    logger.log(Level.WARNING, "Unable to get set_name or set_count from result set", ex); // NON-NLS
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "Failed to get next result for set name counts", ex); // NON-NLS
        }
    }

    /**
     * Gets the counts for each interesting items set
     *
     * @return A map from each set name to the number of items in it
     */
    public Map<String, Long> getSetCountMap() {
        return setCounts;
    }
}
}
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.report.modules.portablecase;
import org.sleuthkit.autopsy.report.ReportModule;
import java.util.logging.Level;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.contentviewertags.ContentViewerTagManager;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import org.sleuthkit.autopsy.report.modules.caseuco.CaseUcoFormatExporter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.CaseDbAccessManager;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
/**
* Creates a portable case from tagged files
*/
public class PortableCaseReportModule implements ReportModule {
private static final Logger logger = Logger.getLogger(PortableCaseReportModule.class.getName());
// Name of the subfolder of the portable case that holds copies of tagged files.
private static final String FILE_FOLDER_NAME = "PortableCaseFiles"; // NON-NLS
// Subfolder for files that match none of the FILE_TYPE_CATEGORIES.
private static final String UNKNOWN_FILE_TYPE_FOLDER = "Other"; // NON-NLS
// Table in the portable case database that records the highest IDs from the source case.
private static final String MAX_ID_TABLE_NAME = "portable_case_max_ids"; // NON-NLS
// Options selected by the user for this report run (tags/sets, compression, ...).
private PortableCaseReportModuleSettings settings;
// These are the types for the exported file subfolders
private static final List<FileTypeCategory> FILE_TYPE_CATEGORIES = Arrays.asList(FileTypeCategory.AUDIO, FileTypeCategory.DOCUMENTS,
FileTypeCategory.EXECUTABLE, FileTypeCategory.IMAGE, FileTypeCategory.VIDEO);
// The case being exported from; set at the start of generateReport().
private Case currentCase = null;
// The portable (destination) case database; null until createCase() succeeds.
private SleuthkitCase portableSkCase = null;
// Display name of the portable case, e.g. "<source case> (Portable)".
private String caseName = "";
// Root folder of the portable case on disk.
private File caseFolder = null;
// FILE_FOLDER_NAME folder inside caseFolder where file copies land.
private File copiedFilesFolder = null;
// Maps old object ID from current case to new object in portable case
private final Map<Long, Content> oldIdToNewContent = new HashMap<>();
// Maps new object ID to the new object
private final Map<Long, Content> newIdToContent = new HashMap<>();
// Maps old TagName to new TagName
private final Map<TagName, TagName> oldTagNameToNewTagName = new HashMap<>();
// Map of old artifact type ID to new artifact type ID. There will only be changes if custom artifact types are present.
private final Map<Integer, Integer> oldArtTypeIdToNewArtTypeId = new HashMap<>();
// Map of old attribute type ID to new attribute type ID. There will only be changes if custom attr types are present.
private final Map<Integer, BlackboardAttribute.Type> oldAttrTypeIdToNewAttrType = new HashMap<>();
// Map of old artifact ID to new artifact
private final Map<Long, BlackboardArtifact> oldArtifactIdToNewArtifact = new HashMap<>();
// No-arg constructor required for report module discovery/instantiation.
public PortableCaseReportModule() {
}
@NbBundle.Messages({
"PortableCaseReportModule.getName.name=Portable Case"
})
@Override
public String getName() {
// Localized, user-visible name of this report module.
return Bundle.PortableCaseReportModule_getName_name();
}
@NbBundle.Messages({
"PortableCaseReportModule.getDescription.description=Copies selected items to a new single-user case that can be easily shared"
})
@Override
public String getDescription() {
// Localized, user-visible description shown in the report-module picker.
return Bundle.PortableCaseReportModule_getDescription_description();
}
@Override
public String getRelativeFilePath() {
    // The "path" for a portable case is its display name; derive it from the
    // open case. There may be no case open yet (e.g. while configuring
    // report modules for command-line use), in which case return "".
    try {
        String displayName = Case.getCurrentCaseThrows().getDisplayName();
        caseName = displayName + " (Portable)"; // NON-NLS
        return caseName;
    } catch (NoCurrentCaseException ex) {
        // a case may not be open yet
        return "";
    }
}
/**
 * Convenience method for handling cancellation
 *
 * @param progressPanel The report progress panel
 */
private void handleCancellation(ReportProgressPanel progressPanel) {
logger.log(Level.INFO, "Portable case creation canceled by user"); // NON-NLS
// Stop the indeterminate spinner before marking the panel as canceled.
progressPanel.setIndeterminate(false);
progressPanel.complete(ReportProgressPanel.ReportStatus.CANCELED);
// Close the (partial) portable case and clear all ID-translation maps.
cleanup();
}
/**
 * Convenience method to avoid code duplication.
 * Assumes that if an exception is supplied then the error is SEVERE. Otherwise
 * it is logged as a WARNING.
 *
 * @param logWarning    Warning to write to the log
 * @param dialogWarning Warning to write to a pop-up window
 * @param ex            The exception (can be null)
 * @param progressPanel The report progress panel
 */
private void handleError(String logWarning, String dialogWarning, Exception ex, ReportProgressPanel progressPanel) {
    // A supplied exception upgrades the severity and is attached to the log entry.
    if (ex != null) {
        logger.log(Level.SEVERE, logWarning, ex);
    } else {
        logger.log(Level.WARNING, logWarning);
    }
    // Stop the spinner, surface the dialog warning, and tear down any partial case.
    progressPanel.setIndeterminate(false);
    progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, dialogWarning);
    cleanup();
}
/**
 * Generates the portable case: validates the parameters, creates the new
 * single-user case, then copies the selected tags, tagged files/artifacts,
 * and interesting items into it, optionally compressing the result.
 *
 * @param reportPath    Folder that will contain the portable case folder
 * @param options       User-selected tags/sets and compression options
 * @param progressPanel Panel used for progress and cancellation
 */
@NbBundle.Messages({
    "PortableCaseReportModule.generateReport.verifying=Verifying selected parameters...",
    "PortableCaseReportModule.generateReport.creatingCase=Creating portable case database...",
    "PortableCaseReportModule.generateReport.copyingTags=Copying tags...",
    "# {0} - tag name",
    "PortableCaseReportModule.generateReport.copyingFiles=Copying files tagged as {0}...",
    "# {0} - tag name",
    "PortableCaseReportModule.generateReport.copyingArtifacts=Copying artifacts tagged as {0}...",
    "# {0} - output folder",
    "PortableCaseReportModule.generateReport.outputDirDoesNotExist=Output folder {0} does not exist",
    "# {0} - output folder",
    "PortableCaseReportModule.generateReport.outputDirIsNotDir=Output folder {0} is not a folder",
    "PortableCaseReportModule.generateReport.caseClosed=Current case has been closed",
    "PortableCaseReportModule.generateReport.interestingItemError=Error loading interesting items",
    "PortableCaseReportModule.generateReport.errorReadingTags=Error while reading tags from case database",
    "PortableCaseReportModule.generateReport.errorReadingSets=Error while reading interesting items sets from case database",
    "PortableCaseReportModule.generateReport.noContentToCopy=No interesting files, results, or tagged items to copy",
    "PortableCaseReportModule.generateReport.errorCopyingTags=Error copying tags",
    "PortableCaseReportModule.generateReport.errorCopyingFiles=Error copying tagged files",
    "PortableCaseReportModule.generateReport.errorCopyingArtifacts=Error copying tagged artifacts",
    "PortableCaseReportModule.generateReport.errorCopyingInterestingFiles=Error copying interesting files",
    "PortableCaseReportModule.generateReport.errorCopyingInterestingResults=Error copying interesting results",
    "PortableCaseReportModule.generateReport.errorCreatingImageTagTable=Error creating image tags table",
    "# {0} - attribute type name",
    "PortableCaseReportModule.generateReport.errorLookingUpAttrType=Error looking up attribute type {0}",
    "PortableCaseReportModule.generateReport.compressingCase=Compressing case...",
    "PortableCaseReportModule.generateReport.errorCreatingReportFolder=Could not make report folder",
    "PortableCaseReportModule.generateReport.errorGeneratingUCOreport=Problem while generating CASE-UCO report"
})
public void generateReport(String reportPath, PortableCaseReportModuleSettings options, ReportProgressPanel progressPanel) {
    this.settings = options;
    progressPanel.setIndeterminate(true);
    progressPanel.start();
    progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_verifying());

    // Clear out any old values
    cleanup();

    // Validate the input parameters
    File outputDir = new File(reportPath);
    if (! outputDir.exists()) {
        handleError("Output folder " + outputDir.toString() + " does not exist",
                Bundle.PortableCaseReportModule_generateReport_outputDirDoesNotExist(outputDir.toString()), null, progressPanel); // NON-NLS
        return;
    }
    if (! outputDir.isDirectory()) {
        handleError("Output folder " + outputDir.toString() + " is not a folder",
                Bundle.PortableCaseReportModule_generateReport_outputDirIsNotDir(outputDir.toString()), null, progressPanel); // NON-NLS
        return;
    }

    // Save the current case object
    try {
        currentCase = Case.getCurrentCaseThrows();
        caseName = currentCase.getDisplayName() + " (Portable)"; // NON-NLS
    } catch (NoCurrentCaseException ex) {
        handleError("Current case has been closed",
                Bundle.PortableCaseReportModule_generateReport_caseClosed(), null, progressPanel); // NON-NLS
        return;
    }

    // Check that there will be something to copy
    List<TagName> tagNames;
    if (options.areAllTagsSelected()) {
        try {
            tagNames = Case.getCurrentCaseThrows().getServices().getTagsManager().getTagNamesInUse();
        } catch (NoCurrentCaseException | TskCoreException ex) {
            handleError("Unable to get all tags",
                    Bundle.PortableCaseReportModule_generateReport_errorReadingTags(), ex, progressPanel); // NON-NLS
            return;
        }
    } else {
        tagNames = options.getSelectedTagNames();
    }

    List<String> setNames;
    if (options.areAllSetsSelected()) {
        try {
            setNames = getAllInterestingItemsSets();
        } catch (NoCurrentCaseException | TskCoreException ex) {
            handleError("Unable to get all interesting items sets",
                    Bundle.PortableCaseReportModule_generateReport_errorReadingSets(), ex, progressPanel); // NON-NLS
            return;
        }
    } else {
        setNames = options.getSelectedSetNames();
    }

    if (tagNames.isEmpty() && setNames.isEmpty()) {
        handleError("No content to copy",
                Bundle.PortableCaseReportModule_generateReport_noContentToCopy(), null, progressPanel); // NON-NLS
        return;
    }

    // Create the case.
    // portableSkCase and caseFolder will be set here.
    progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_creatingCase());
    createCase(outputDir, progressPanel);
    if (portableSkCase == null) {
        // The error has already been handled
        return;
    }

    // Check for cancellation
    if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
        handleCancellation(progressPanel);
        return;
    }

    // Set up the table for the image tags
    try {
        initializeImageTags(progressPanel);
    } catch (TskCoreException ex) {
        handleError("Error creating image tag table", Bundle.PortableCaseReportModule_generateReport_errorCreatingImageTagTable(), ex, progressPanel); // NON-NLS
        return;
    }

    // Copy the selected tags
    progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_copyingTags());
    try {
        for(TagName tagName:tagNames) {
            TagName newTagName = portableSkCase.addOrUpdateTagName(tagName.getDisplayName(), tagName.getDescription(), tagName.getColor(), tagName.getKnownStatus());
            oldTagNameToNewTagName.put(tagName, newTagName);
        }
    } catch (TskCoreException ex) {
        handleError("Error copying tags", Bundle.PortableCaseReportModule_generateReport_errorCopyingTags(), ex, progressPanel); // NON-NLS
        return;
    }

    // Set up tracking to support any custom artifact or attribute types
    for (BlackboardArtifact.ARTIFACT_TYPE type:BlackboardArtifact.ARTIFACT_TYPE.values()) {
        oldArtTypeIdToNewArtTypeId.put(type.getTypeID(), type.getTypeID());
    }
    for (BlackboardAttribute.ATTRIBUTE_TYPE type:BlackboardAttribute.ATTRIBUTE_TYPE.values()) {
        try {
            oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getAttributeType(type.getLabel()));
        } catch (TskCoreException ex) {
            handleError("Error looking up attribute name " + type.getLabel(),
                    Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()),
                    ex, progressPanel); // NON-NLS
            // FIX: bail out here. handleError() calls cleanup(), which nulls
            // portableSkCase; continuing would NPE in the copy steps below.
            return;
        }
    }

    // Copy the tagged files
    try {
        for(TagName tagName:tagNames) {
            // Check for cancellation
            if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                handleCancellation(progressPanel);
                return;
            }
            progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_copyingFiles(tagName.getDisplayName()));
            addFilesToPortableCase(tagName, progressPanel);

            // Check for cancellation
            if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                handleCancellation(progressPanel);
                return;
            }
        }
    } catch (TskCoreException ex) {
        handleError("Error copying tagged files", Bundle.PortableCaseReportModule_generateReport_errorCopyingFiles(), ex, progressPanel); // NON-NLS
        return;
    }

    // Copy the tagged artifacts and associated files
    try {
        for(TagName tagName:tagNames) {
            // Check for cancellation
            if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                handleCancellation(progressPanel);
                return;
            }
            progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_copyingArtifacts(tagName.getDisplayName()));
            addArtifactsToPortableCase(tagName, progressPanel);

            // Check for cancellation
            if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                handleCancellation(progressPanel);
                return;
            }
        }
    } catch (TskCoreException ex) {
        handleError("Error copying tagged artifacts", Bundle.PortableCaseReportModule_generateReport_errorCopyingArtifacts(), ex, progressPanel); // NON-NLS
        return;
    }

    // Copy interesting files and results
    if (! setNames.isEmpty()) {
        try {
            List<BlackboardArtifact> interestingFiles = currentCase.getSleuthkitCase().getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
            for (BlackboardArtifact art:interestingFiles) {
                // Check for cancellation
                if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                    handleCancellation(progressPanel);
                    return;
                }
                BlackboardAttribute setAttr = art.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
                // FIX: getAttribute() can return null for a malformed artifact;
                // skip it instead of throwing NullPointerException.
                if (setAttr != null && setNames.contains(setAttr.getValueString())) {
                    copyContentToPortableCase(art, progressPanel);
                }
            }
        } catch (TskCoreException ex) {
            handleError("Error copying interesting files", Bundle.PortableCaseReportModule_generateReport_errorCopyingInterestingFiles(), ex, progressPanel); // NON-NLS
            return;
        }

        try {
            List<BlackboardArtifact> interestingResults = currentCase.getSleuthkitCase().getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
            for (BlackboardArtifact art:interestingResults) {
                // Check for cancellation
                if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                    handleCancellation(progressPanel);
                    return;
                }
                BlackboardAttribute setAttr = art.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
                // FIX: same null-check as above.
                if (setAttr != null && setNames.contains(setAttr.getValueString())) {
                    copyContentToPortableCase(art, progressPanel);
                }
            }
        } catch (TskCoreException ex) {
            handleError("Error copying interesting results", Bundle.PortableCaseReportModule_generateReport_errorCopyingInterestingResults(), ex, progressPanel); // NON-NLS
            return;
        }
    }

    // Check for cancellation
    if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
        handleCancellation(progressPanel);
        return;
    }

    File reportsFolder = Paths.get(caseFolder.toString(), "Reports").toFile();
    if(!reportsFolder.mkdir()) {
        handleError("Could not make report folder", Bundle.PortableCaseReportModule_generateReport_errorCreatingReportFolder(), null, progressPanel); // NON-NLS
        return;
    }

    try {
        CaseUcoFormatExporter.export(tagNames, setNames, reportsFolder, progressPanel);
    } catch (IOException | SQLException | NoCurrentCaseException | TskCoreException ex) {
        handleError("Problem while generating CASE-UCO report",
                Bundle.PortableCaseReportModule_generateReport_errorGeneratingUCOreport(), ex, progressPanel); // NON-NLS
        // FIX: handleError() has already cleaned up the case; do not proceed
        // to the compression step with a closed/partial case.
        return;
    }

    // Compress the case (if desired)
    if (options.shouldCompress()) {
        progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_compressingCase());

        boolean success = compressCase(progressPanel);

        // Check for cancellation
        if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
            handleCancellation(progressPanel);
            return;
        }

        if (! success) {
            // Errors have been handled already
            return;
        }
    }

    // Close the case connections and clear out the maps
    cleanup();

    progressPanel.complete(ReportProgressPanel.ReportStatus.COMPLETE);
}
/**
 * Collects the names of all interesting item sets in use in the current case.
 *
 * @return the distinct TSK_SET_NAME values found on interesting file/artifact hits
 *
 * @throws NoCurrentCaseException if no case is open
 * @throws TskCoreException on a case database error
 */
private List<String> getAllInterestingItemsSets() throws NoCurrentCaseException, TskCoreException {
    // Inner query: every TSK_SET_NAME attribute attached to an interesting
    // file hit or interesting artifact hit.
    String innerSelect = "SELECT (value_text) AS set_name FROM blackboard_attributes WHERE (artifact_type_id = '"
            + BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID() + "' OR artifact_type_id = '"
            + BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + "') AND attribute_type_id = '"
            + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + "'"; // NON-NLS

    // Outer query: group by set name and count the members of each set.
    String query = "set_name, count(1) AS set_count FROM (" + innerSelect + ") set_names GROUP BY set_name"; // NON-NLS

    GetInterestingItemSetNamesCallback callback = new GetInterestingItemSetNamesCallback();
    Case.getCurrentCaseThrows().getSleuthkitCase().getCaseDbAccessManager().select(query, callback);

    // Only the names are needed here; the counts are discarded.
    return new ArrayList<>(callback.getSetCountMap().keySet());
}
/**
 * Create the case directory and case database.
 * portableSkCase will be set if this completes without error.
 *
 * @param outputDir The parent for the case folder
 * @param progressPanel
 */
@NbBundle.Messages({
"# {0} - case folder",
"PortableCaseReportModule.createCase.caseDirExists=Case folder {0} already exists",
"PortableCaseReportModule.createCase.errorCreatingCase=Error creating case",
"# {0} - folder",
"PortableCaseReportModule.createCase.errorCreatingFolder=Error creating folder {0}",
"PortableCaseReportModule.createCase.errorStoringMaxIds=Error storing maximum database IDs",
})
private void createCase(File outputDir, ReportProgressPanel progressPanel) {
// Create the case folder
// Note: callers detect failure by checking (portableSkCase == null) after return,
// so every error path here leaves portableSkCase unset (or cleaned up by handleError).
caseFolder = Paths.get(outputDir.toString(), caseName).toFile();
if (caseFolder.exists()) {
handleError("Case folder " + caseFolder.toString() + " already exists",
Bundle.PortableCaseReportModule_createCase_caseDirExists(caseFolder.toString()), null, progressPanel); // NON-NLS
return;
}
// Create the case
try {
portableSkCase = currentCase.createPortableCase(caseName, caseFolder);
} catch (TskCoreException ex) {
handleError("Error creating case " + caseName + " in folder " + caseFolder.toString(),
Bundle.PortableCaseReportModule_createCase_errorCreatingCase(), ex, progressPanel); // NON-NLS
return;
}
// Store the highest IDs
// (so new objects added later to the portable case get non-colliding IDs)
try {
saveHighestIds();
} catch (TskCoreException ex) {
handleError("Error storing maximum database IDs",
Bundle.PortableCaseReportModule_createCase_errorStoringMaxIds(), ex, progressPanel); // NON-NLS
return;
}
// Create the base folder for the copied files
copiedFilesFolder = Paths.get(caseFolder.toString(), FILE_FOLDER_NAME).toFile();
if (! copiedFilesFolder.mkdir()) {
handleError("Error creating folder " + copiedFilesFolder.toString(),
Bundle.PortableCaseReportModule_createCase_errorCreatingFolder(copiedFilesFolder.toString()), null, progressPanel); // NON-NLS
return;
}
// Create subfolders for the copied files
// One subfolder per known file type category (audio, documents, ...).
for (FileTypeCategory cat:FILE_TYPE_CATEGORIES) {
File subFolder = Paths.get(copiedFilesFolder.toString(), cat.getDisplayName()).toFile();
if (! subFolder.mkdir()) {
handleError("Error creating folder " + subFolder.toString(),
Bundle.PortableCaseReportModule_createCase_errorCreatingFolder(subFolder.toString()), null, progressPanel); // NON-NLS
return;
}
}
// Catch-all subfolder for files that match no category.
File unknownTypeFolder = Paths.get(copiedFilesFolder.toString(), UNKNOWN_FILE_TYPE_FOLDER).toFile();
if (! unknownTypeFolder.mkdir()) {
handleError("Error creating folder " + unknownTypeFolder.toString(),
Bundle.PortableCaseReportModule_createCase_errorCreatingFolder(unknownTypeFolder.toString()), null, progressPanel); // NON-NLS
return;
}
}
/**
 * Save the current highest IDs to the portable case so later additions to it
 * do not collide with IDs copied from the source case.
 *
 * @throws TskCoreException
 */
private void saveHighestIds() throws TskCoreException {
    // Create the bookkeeping table in the portable case first.
    String tableSchema = "( table_name TEXT PRIMARY KEY, "
            + " max_id TEXT)"; // NON-NLS
    portableSkCase.getCaseDbAccessManager().createTable(MAX_ID_TABLE_NAME, tableSchema);

    // Then query the source case for the highest ID in each table of interest;
    // StoreMaxIdCallback writes each result into MAX_ID_TABLE_NAME.
    CaseDbAccessManager sourceDbManager = currentCase.getSleuthkitCase().getCaseDbAccessManager();
    sourceDbManager.select("max(obj_id) as max_id from tsk_objects", new StoreMaxIdCallback("tsk_objects")); // NON-NLS
    sourceDbManager.select("max(tag_id) as max_id from content_tags", new StoreMaxIdCallback("content_tags")); // NON-NLS
    sourceDbManager.select("max(tag_id) as max_id from blackboard_artifact_tags", new StoreMaxIdCallback("blackboard_artifact_tags")); // NON-NLS
    sourceDbManager.select("max(examiner_id) as max_id from tsk_examiners", new StoreMaxIdCallback("tsk_examiners")); // NON-NLS
}
/**
 * Set up the image tag table in the portable case (created only if it does
 * not already exist).
 *
 * @param progressPanel
 *
 * @throws TskCoreException
 */
private void initializeImageTags(ReportProgressPanel progressPanel) throws TskCoreException {
    CaseDbAccessManager portableDbAccessManager = portableSkCase.getCaseDbAccessManager();
    boolean tableAlreadyExists = portableDbAccessManager.tableExists(ContentViewerTagManager.TABLE_NAME);
    if (!tableAlreadyExists) {
        portableDbAccessManager.createTable(ContentViewerTagManager.TABLE_NAME, ContentViewerTagManager.TABLE_SCHEMA_SQLITE);
    }
}
/**
 * Add all files with a given tag to the portable case.
 *
 * @param oldTagName The TagName object from the current case
 * @param progressPanel The progress panel
 *
 * @throws TskCoreException
 */
private void addFilesToPortableCase(TagName oldTagName, ReportProgressPanel progressPanel) throws TskCoreException {
// Get all the tags in the current case
List<ContentTag> tags = currentCase.getServices().getTagsManager().getContentTagsByTagName(oldTagName);
// Copy the files into the portable case and tag
for (ContentTag tag : tags) {
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
return;
}
Content content = tag.getContent();
// Only file content is copied here; tags on other content types are skipped.
if (content instanceof AbstractFile) {
// copyContentToPortableCase also copies the file's ancestors and
// returns the file's object ID in the portable case.
long newFileId = copyContentToPortableCase(content, progressPanel);
// Tag the file
// oldTagNameToNewTagName was populated from the selected tags in
// generateReport(); a missing entry indicates an internal inconsistency.
if (! oldTagNameToNewTagName.containsKey(tag.getName())) {
throw new TskCoreException("TagName map is missing entry for ID " + tag.getName().getId() + " with display name " + tag.getName().getDisplayName()); // NON-NLS
}
ContentTag newContentTag = portableSkCase.addContentTag(newIdToContent.get(newFileId), oldTagNameToNewTagName.get(tag.getName()), tag.getComment(), tag.getBeginByteOffset(), tag.getEndByteOffset());
// Get the image tag data associated with this tag (empty string if there is none)
// and save it if present
String appData = getImageTagDataForContentTag(tag);
if (! appData.isEmpty()) {
addImageTagToPortableCase(newContentTag, appData);
}
}
}
}
/**
 * Gets the image tag data for a given content tag.
 *
 * @param tag The ContentTag in the current case
 *
 * @return The app_data string for this content tag or an empty string if there was none
 *
 * @throws TskCoreException
 */
private String getImageTagDataForContentTag(ContentTag tag) throws TskCoreException {
    // tag.getId() is numeric, so embedding it directly in the query is safe.
    String query = "* FROM " + ContentViewerTagManager.TABLE_NAME + " WHERE content_tag_id = " + tag.getId();
    GetImageTagCallback callback = new GetImageTagCallback();
    currentCase.getSleuthkitCase().getCaseDbAccessManager().select(query, callback);
    return callback.getAppData();
}
/**
 * CaseDbAccessManager callback to get the app_data string for the image tag
 */
private static class GetImageTagCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback {
private static final Logger logger = Logger.getLogger(PortableCaseReportModule.class.getName());
// Holds the app_data of the last row processed; "" if no row was seen.
private String appData = "";
@Override
public void process(ResultSet rs) {
try {
// The query filters on a single content_tag_id, so at most one row is expected;
// if several rows arrive, the last one wins.
while (rs.next()) {
try {
appData = rs.getString("app_data"); // NON-NLS
} catch (SQLException ex) {
logger.log(Level.WARNING, "Unable to get app_data from result set", ex); // NON-NLS
}
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "Failed to get next result for app_data", ex); // NON-NLS
}
}
/**
 * Get the app_data string
 *
 * @return the app_data string
 */
String getAppData() {
return appData;
}
}
/**
 * Add an image tag to the portable case.
 *
 * @param newContentTag The content tag in the portable case
 * @param appData       The string to copy into app_data
 *
 * @throws TskCoreException
 */
private void addImageTagToPortableCase(ContentTag newContentTag, String appData) throws TskCoreException {
    // FIX: appData was concatenated into the SQL literal unescaped, so any
    // single quote in it broke the INSERT (SQL syntax error / injection).
    // Double single quotes per standard SQL string-literal escaping.
    String escapedAppData = appData.replace("'", "''");
    String insert = "(content_tag_id, app_data) VALUES (" + newContentTag.getId() + ", '" + escapedAppData + "')";
    portableSkCase.getCaseDbAccessManager().insert(ContentViewerTagManager.TABLE_NAME, insert);
}
/**
 * Add all artifacts with a given tag to the portable case.
 *
 * @param oldTagName The TagName object from the current case
 * @param progressPanel The progress panel
 *
 * @throws TskCoreException
 */
private void addArtifactsToPortableCase(TagName oldTagName, ReportProgressPanel progressPanel) throws TskCoreException {
List<BlackboardArtifactTag> tags = currentCase.getServices().getTagsManager().getBlackboardArtifactTagsByTagName(oldTagName);
// Copy the artifacts into the portable case along with their content and tag
for (BlackboardArtifactTag tag : tags) {
// Check for cancellation
if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
return;
}
// Copy the source content
// (the artifact's parent content must exist in the portable case first)
Content content = tag.getContent();
long newContentId = copyContentToPortableCase(content, progressPanel);
// Copy the artifact
BlackboardArtifact newArtifact = copyArtifact(newContentId, tag.getArtifact());
// Tag the artfiact
// oldTagNameToNewTagName was populated in generateReport(); a missing
// entry indicates an internal inconsistency.
if (! oldTagNameToNewTagName.containsKey(tag.getName())) {
throw new TskCoreException("TagName map is missing entry for ID " + tag.getName().getId() + " with display name " + tag.getName().getDisplayName()); // NON-NLS
}
portableSkCase.addBlackboardArtifactTag(newArtifact, oldTagNameToNewTagName.get(tag.getName()), tag.getComment());
}
}
/**
 * Copy an artifact into the new case. Will also copy any associated artifacts
 * (recursively, via the TSK_ASSOCIATED_ARTIFACT attribute).
 *
 * @param newContentId The content ID (in the portable case) of the source content
 * @param artifactToCopy The artifact to copy
 *
 * @return The new artifact in the portable case
 *
 * @throws TskCoreException
 */
private BlackboardArtifact copyArtifact(long newContentId, BlackboardArtifact artifactToCopy) throws TskCoreException {
// Already copied (possibly via an associated-artifact chain) — reuse it.
if (oldArtifactIdToNewArtifact.containsKey(artifactToCopy.getArtifactID())) {
return oldArtifactIdToNewArtifact.get(artifactToCopy.getArtifactID());
}
// First create the associated artifact (if present)
// The TSK_ASSOCIATED_ARTIFACT attribute must point at the NEW artifact's ID,
// so the associated artifact is copied before this one's attributes are built.
BlackboardAttribute oldAssociatedAttribute = artifactToCopy.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT));
List<BlackboardAttribute> newAttrs = new ArrayList<>();
if (oldAssociatedAttribute != null) {
BlackboardArtifact oldAssociatedArtifact = currentCase.getSleuthkitCase().getBlackboardArtifact(oldAssociatedAttribute.getValueLong());
BlackboardArtifact newAssociatedArtifact = copyArtifact(newContentId, oldAssociatedArtifact);
newAttrs.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
String.join(",", oldAssociatedAttribute.getSources()), newAssociatedArtifact.getArtifactID()));
}
// Create the new artifact
// getNewArtifactTypeId() maps the type, creating custom types on demand.
int newArtifactTypeId = getNewArtifactTypeId(artifactToCopy);
BlackboardArtifact newArtifact = portableSkCase.newBlackboardArtifact(newArtifactTypeId, newContentId);
List<BlackboardAttribute> oldAttrs = artifactToCopy.getAttributes();
// Copy over each attribute, making sure the type is in the new case.
for (BlackboardAttribute oldAttr:oldAttrs) {
// The associated artifact has already been handled
if (oldAttr.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT.getTypeID()) {
continue;
}
BlackboardAttribute.Type newAttributeType = getNewAttributeType(oldAttr);
// Rebuild the attribute with the constructor matching its value type;
// the source list is flattened to a comma-separated string in all cases.
switch (oldAttr.getValueType()) {
case BYTE:
newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
oldAttr.getValueBytes()));
break;
case DOUBLE:
newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
oldAttr.getValueDouble()));
break;
case INTEGER:
newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
oldAttr.getValueInt()));
break;
case DATETIME:
case LONG:
newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
oldAttr.getValueLong()));
break;
case STRING:
case JSON:
newAttrs.add(new BlackboardAttribute(newAttributeType, String.join(",", oldAttr.getSources()),
oldAttr.getValueString()));
break;
default:
throw new TskCoreException("Unexpected attribute value type found: " + oldAttr.getValueType().getLabel()); // NON-NLS
}
}
newArtifact.addAttributes(newAttrs);
// Cache the mapping so associated-artifact chains and repeated tags reuse it.
oldArtifactIdToNewArtifact.put(artifactToCopy.getArtifactID(), newArtifact);
return newArtifact;
}
/**
 * Get the artifact type ID in the portable case and create new artifact type if needed.
 * For built-in artifacts this will be the same as the original.
 *
 * @param oldArtifact The artifact in the current case
 *
 * @return The corresponding artifact type ID in the portable case
 */
private int getNewArtifactTypeId(BlackboardArtifact oldArtifact) throws TskCoreException {
    // Built-in types (and previously-seen custom types) are already cached.
    Integer cachedTypeId = oldArtTypeIdToNewArtTypeId.get(oldArtifact.getArtifactTypeID());
    if (cachedTypeId != null) {
        return cachedTypeId;
    }

    // Unseen type: it must be custom, so create it in the portable case and cache it.
    BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getArtifactType(oldArtifact.getArtifactTypeName());
    try {
        BlackboardArtifact.Type newCustomType = portableSkCase.addBlackboardArtifactType(oldCustomType.getTypeName(), oldCustomType.getDisplayName());
        oldArtTypeIdToNewArtTypeId.put(oldArtifact.getArtifactTypeID(), newCustomType.getTypeID());
        return newCustomType.getTypeID();
    } catch (TskDataException ex) {
        throw new TskCoreException("Error creating new artifact type " + oldCustomType.getTypeName(), ex); // NON-NLS
    }
}
/**
 * Get the attribute type ID in the portable case and create new attribute type if needed.
 * For built-in attributes this will be the same as the original.
 *
 * @param oldAttribute The attribute in the current case
 *
 * @return The corresponding attribute type in the portable case
 */
private BlackboardAttribute.Type getNewAttributeType(BlackboardAttribute oldAttribute) throws TskCoreException {
    BlackboardAttribute.Type oldAttrType = oldAttribute.getAttributeType();

    // Built-in types (and previously-seen custom types) are already cached.
    BlackboardAttribute.Type cachedType = oldAttrTypeIdToNewAttrType.get(oldAttrType.getTypeID());
    if (cachedType != null) {
        return cachedType;
    }

    // Unseen type: it must be custom, so create it in the portable case and cache it.
    try {
        BlackboardAttribute.Type newCustomType = portableSkCase.addArtifactAttributeType(oldAttrType.getTypeName(),
                oldAttrType.getValueType(), oldAttrType.getDisplayName());
        oldAttrTypeIdToNewAttrType.put(oldAttrType.getTypeID(), newCustomType);
        return newCustomType;
    } catch (TskDataException ex) {
        throw new TskCoreException("Error creating new attribute type " + oldAttrType.getTypeName(), ex); // NON-NLS
    }
}
/**
 * Top level method to copy a content object to the portable case.
 * Updates the progress label, then delegates to copyContent(), which also
 * copies the content's ancestors as needed.
 *
 * @param content The content object to copy
 * @param progressPanel The progress panel
 *
 * @return The object ID of the copied content in the portable case
 *
 * @throws TskCoreException
 */
@NbBundle.Messages({
"# {0} - File name",
"PortableCaseReportModule.copyContentToPortableCase.copyingFile=Copying file {0}",
})
private long copyContentToPortableCase(Content content, ReportProgressPanel progressPanel) throws TskCoreException {
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_copyContentToPortableCase_copyingFile(content.getUniquePath()));
return copyContent(content);
}
/**
 * Returns the object ID for the given content object in the portable case,
 * copying the object (and, recursively, its parent chain) into the portable
 * case the first time it is seen. Copied objects are cached in
 * oldIdToNewContent/newIdToContent so each original object is copied once.
 *
 * @param content The content object to copy into the portable case
 *
 * @return the new object ID for this content
 *
 * @throws TskCoreException
 */
private long copyContent(Content content) throws TskCoreException {

    // Check if we've already copied this content
    if (oldIdToNewContent.containsKey(content.getId())) {
        return oldIdToNewContent.get(content.getId()).getId();
    }

    // Otherwise:
    // - Make parent of this object (if applicable)
    // - Copy this content
    long parentId = 0;
    if (content.getParent() != null) {
        parentId = copyContent(content.getParent());
    }

    Content newContent;
    if (content instanceof BlackboardArtifact) {
        // Artifacts are copied by copyArtifact(), outside of the transaction below
        BlackboardArtifact artifactToCopy = (BlackboardArtifact)content;
        newContent = copyArtifact(parentId, artifactToCopy);
    } else {
        // All other content types are copied inside a single transaction so a
        // failure leaves the portable case database unchanged (see rollback below)
        CaseDbTransaction trans = portableSkCase.beginTransaction();
        try {
            if (content instanceof Image) {
                Image image = (Image)content;
                newContent = portableSkCase.addImage(image.getType(), image.getSsize(), image.getSize(), image.getName(),
                        new ArrayList<>(), image.getTimeZone(), image.getMd5(), image.getSha1(), image.getSha256(), image.getDeviceId(), trans);
            } else if (content instanceof VolumeSystem) {
                VolumeSystem vs = (VolumeSystem)content;
                newContent = portableSkCase.addVolumeSystem(parentId, vs.getType(), vs.getOffset(), vs.getBlockSize(), trans);
            } else if (content instanceof Volume) {
                Volume vs = (Volume)content;
                newContent = portableSkCase.addVolume(parentId, vs.getAddr(), vs.getStart(), vs.getLength(),
                        vs.getDescription(), vs.getFlags(), trans);
            } else if (content instanceof FileSystem) {
                FileSystem fs = (FileSystem)content;
                newContent = portableSkCase.addFileSystem(parentId, fs.getImageOffset(), fs.getFsType(), fs.getBlock_size(),
                        fs.getBlock_count(), fs.getRoot_inum(), fs.getFirst_inum(), fs.getLastInum(),
                        fs.getName(), trans);
            } else if (content instanceof BlackboardArtifact) {
                // NOTE(review): unreachable — artifacts are already handled by the
                // outer instanceof check at the top of this method.
                BlackboardArtifact artifactToCopy = (BlackboardArtifact)content;
                newContent = copyArtifact(parentId, artifactToCopy);
            } else if (content instanceof AbstractFile) {
                AbstractFile abstractFile = (AbstractFile)content;

                if (abstractFile instanceof LocalFilesDataSource) {
                    LocalFilesDataSource localFilesDS = (LocalFilesDataSource)abstractFile;
                    newContent = portableSkCase.addLocalFilesDataSource(localFilesDS.getDeviceId(), localFilesDS.getName(), localFilesDS.getTimeZone(), trans);
                } else {
                    if (abstractFile.isDir()) {
                        newContent = portableSkCase.addLocalDirectory(parentId, abstractFile.getName(), trans);
                    } else {
                        try {
                            // Copy the file into the case folder, prefixing the name
                            // with the original object ID so names cannot collide
                            String fileName = abstractFile.getId() + "-" + FileUtil.escapeFileName(abstractFile.getName());
                            String exportSubFolder = getExportSubfolder(abstractFile);
                            File exportFolder = Paths.get(copiedFilesFolder.toString(), exportSubFolder).toFile();
                            File localFile = new File(exportFolder, fileName);
                            ContentUtils.writeToFile(abstractFile, localFile);

                            // Get the new parent object in the portable case database
                            Content oldParent = abstractFile.getParent();
                            if (! oldIdToNewContent.containsKey(oldParent.getId())) {
                                throw new TskCoreException("Parent of file with ID " + abstractFile.getId() + " has not been created"); // NON-NLS
                            }
                            Content newParent = oldIdToNewContent.get(oldParent.getId());

                            // Construct the relative path to the copied file
                            String relativePath = FILE_FOLDER_NAME + File.separator + exportSubFolder + File.separator + fileName;

                            newContent = portableSkCase.addLocalFile(abstractFile.getName(), relativePath, abstractFile.getSize(),
                                    abstractFile.getCtime(), abstractFile.getCrtime(), abstractFile.getAtime(), abstractFile.getMtime(),
                                    abstractFile.getMd5Hash(), abstractFile.getKnown(), abstractFile.getMIMEType(),
                                    true, TskData.EncodingType.NONE,
                                    newParent, trans);
                        } catch (IOException ex) {
                            throw new TskCoreException("Error copying file " + abstractFile.getName() + " with original obj ID "
                                    + abstractFile.getId(), ex); // NON-NLS
                        }
                    }
                }
            } else {
                throw new TskCoreException("Trying to copy unexpected Content type " + content.getClass().getName()); // NON-NLS
            }
            trans.commit();
        } catch (TskCoreException ex) {
            // Undo any partial writes before propagating
            trans.rollback();
            throw(ex);
        }
    }

    // Save the new object in both lookup maps
    oldIdToNewContent.put(content.getId(), newContent);
    newIdToContent.put(newContent.getId(), newContent);
    return oldIdToNewContent.get(content.getId()).getId();
}
/**
 * Return the subfolder name for this file based on MIME type.
 *
 * Files whose MIME type is missing, empty, or not present in any known
 * category fall back to the "unknown" folder.
 *
 * @param abstractFile the file
 *
 * @return the name of the appropriate subfolder for this file type
 */
private String getExportSubfolder(AbstractFile abstractFile) {
    final String mimeType = abstractFile.getMIMEType();
    if ((mimeType != null) && ( ! mimeType.isEmpty())) {
        for (FileTypeCategory category : FILE_TYPE_CATEGORIES) {
            if (category.getMediaTypes().contains(mimeType)) {
                return category.getDisplayName();
            }
        }
    }
    return UNKNOWN_FILE_TYPE_FOLDER;
}
/**
 * Clear out the maps and other fields and close the database connections.
 *
 * Called when report generation finishes (or fails) so the next run starts
 * from a clean state.
 */
private void cleanup() {
    // Drop all cached mappings between the original case and the portable case
    oldIdToNewContent.clear();
    newIdToContent.clear();
    oldTagNameToNewTagName.clear();
    oldArtTypeIdToNewArtTypeId.clear();
    oldAttrTypeIdToNewAttrType.clear();
    oldArtifactIdToNewArtifact.clear();

    closePortableCaseDatabase();

    // Release references to per-run state
    currentCase = null;
    caseFolder = null;
    copiedFilesFolder = null;
}
/**
 * Close the portable case database, if it is open, and clear the reference
 * so the close is idempotent.
 */
private void closePortableCaseDatabase() {
    if (portableSkCase == null) {
        return;
    }
    portableSkCase.close();
    portableSkCase = null;
}
/*@Override
public JPanel getConfigurationPanel() {
configPanel = new CreatePortableCasePanel();
return configPanel;
} */
/**
 * Query callback that reads the single "max_id" column from a result set and
 * records it (together with the source table name) in the portable case's
 * MAX_ID_TABLE_NAME table. Errors are logged and do not abort processing of
 * remaining rows.
 */
private class StoreMaxIdCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback {

    // Name of the original-case table whose maximum ID is being recorded
    private final String tableName;

    StoreMaxIdCallback(String tableName) {
        this.tableName = tableName;
    }

    @Override
    public void process(ResultSet rs) {
        try {
            while (rs.next()) {
                try {
                    Long maxId = rs.getLong("max_id"); // NON-NLS
                    // Values are interpolated directly into the VALUES clause.
                    // tableName and maxId are internal (not user-supplied), so
                    // injection risk is low — NOTE(review): confirm
                    // CaseDbAccessManager offers no parameterized insert.
                    String query = " (table_name, max_id) VALUES ('" + tableName + "', '" + maxId + "')"; // NON-NLS
                    portableSkCase.getCaseDbAccessManager().insert(MAX_ID_TABLE_NAME, query);
                } catch (SQLException ex) {
                    logger.log(Level.WARNING, "Unable to get maximum ID from result set", ex); // NON-NLS
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Unable to save maximum ID from result set", ex); // NON-NLS
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "Failed to get maximum ID from result set", ex); // NON-NLS
        }
    }
}
@NbBundle.Messages({
    "PortableCaseReportModule.compressCase.errorFinding7zip=Could not locate 7-Zip executable",
    "# {0} - Temp folder path",
    "PortableCaseReportModule.compressCase.errorCreatingTempFolder=Could not create temporary folder {0}",
    "PortableCaseReportModule.compressCase.errorCompressingCase=Error compressing case",
    "PortableCaseReportModule.compressCase.canceled=Compression canceled by user",
})
/**
 * Compress the portable case folder into a (possibly multi-part) archive
 * using the bundled 7-Zip executable, replacing the case folder contents
 * with the archive on success.
 *
 * Fixes over the previous revision: an InterruptedException now restores the
 * thread's interrupt status instead of silently swallowing it, and the
 * "exectuable" typo in the internal error log message is corrected.
 *
 * @param progressPanel The progress panel (polled for user cancellation)
 *
 * @return true on success, false if any step failed or the user canceled
 */
private boolean compressCase(ReportProgressPanel progressPanel) {

    // Close the portable case database (we still need some of the variables that would be cleared by cleanup())
    closePortableCaseDatabase();

    // Make a temporary folder for the compressed case
    File tempZipFolder = Paths.get(currentCase.getTempDirectory(), "portableCase" + System.currentTimeMillis()).toFile(); // NON-NLS
    if (! tempZipFolder.mkdir()) {
        handleError("Error creating temporary folder " + tempZipFolder.toString(),
                Bundle.PortableCaseReportModule_compressCase_errorCreatingTempFolder(tempZipFolder.toString()), null, progressPanel); // NON-NLS
        return false;
    }

    // Find 7-Zip
    File sevenZipExe = locate7ZipExecutable();
    if (sevenZipExe == null) {
        handleError("Error finding 7-Zip executable", Bundle.PortableCaseReportModule_compressCase_errorFinding7zip(), null, progressPanel); // NON-NLS
        return false;
    }

    // Create the chunk option; an empty string means no volume splitting
    String chunkOption = "";
    if (settings.getChunkSize() != PortableCaseReportModuleSettings.ChunkSize.NONE) {
        chunkOption = "-v" + settings.getChunkSize().getSevenZipParam();
    }

    File zipFile = Paths.get(tempZipFolder.getAbsolutePath(), caseName + ".zip").toFile(); // NON-NLS
    ProcessBuilder procBuilder = new ProcessBuilder();
    procBuilder.command(
            sevenZipExe.getAbsolutePath(),
            "a",     // Add to archive
            zipFile.getAbsolutePath(),
            caseFolder.getAbsolutePath(),
            chunkOption
    );

    try {
        Process process = procBuilder.start();

        // Poll periodically so the user can cancel mid-compression
        while (process.isAlive()) {
            if (progressPanel.getStatus() == ReportProgressPanel.ReportStatus.CANCELED) {
                process.destroy();
                return false;
            }
            Thread.sleep(200);
        }
        int exitCode = process.exitValue();
        if (exitCode != 0) {
            // Save any errors so they can be logged
            StringBuilder sb = new StringBuilder();
            try (BufferedReader br = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
                String line;
                while ((line = br.readLine()) != null) {
                    sb.append(line).append(System.getProperty("line.separator")); // NON-NLS
                }
            }

            handleError("Error compressing case\n7-Zip output: " + sb.toString(), Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), null, progressPanel); // NON-NLS
            return false;
        }
    } catch (IOException ex) {
        handleError("Error compressing case", Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), ex, progressPanel); // NON-NLS
        return false;
    } catch (InterruptedException ex) {
        // Restore the interrupt status so callers can observe the interruption
        Thread.currentThread().interrupt();
        handleError("Error compressing case", Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), ex, progressPanel); // NON-NLS
        return false;
    }

    // Delete everything in the case folder then copy over the compressed file(s)
    try {
        FileUtils.cleanDirectory(caseFolder);
        FileUtils.copyDirectory(tempZipFolder, caseFolder);
        FileUtils.deleteDirectory(tempZipFolder);
    } catch (IOException ex) {
        handleError("Error compressing case", Bundle.PortableCaseReportModule_compressCase_errorCompressingCase(), ex, progressPanel); // NON-NLS
        return false;
    }

    return true;
}
/**
 * Locate the 7-Zip executable from the release folder.
 *
 * Only available on Windows; returns null if the platform is not Windows,
 * the executable cannot be found, or it is not executable.
 *
 * @return 7-Zip executable, or null if unavailable
 */
private static File locate7ZipExecutable() {
    if (!PlatformUtil.isWindowsOS()) {
        return null;
    }

    final String relativePath = Paths.get("7-Zip", "7z.exe").toString(); // NON-NLS
    final File executable = InstalledFileLocator.getDefault().locate(relativePath,
            PortableCaseReportModule.class.getPackage().getName(), false);
    if ((executable == null) || ( ! executable.canExecute())) {
        return null;
    }
    return executable;
}
/**
 * Processes the result sets from the interesting item set name query.
 *
 * Each row is expected to contain a "set_name" string and a "set_count"
 * value; results accumulate into a map from set name to count. Row-level
 * SQL errors are logged and do not abort processing of remaining rows.
 */
public static class GetInterestingItemSetNamesCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback {

    private static final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(GetInterestingItemSetNamesCallback.class.getName());

    // Maps each interesting item set name to the number of items in that set
    private final Map<String, Long> setCounts = new HashMap<>();

    @Override
    public void process(ResultSet rs) {
        try {
            while (rs.next()) {
                try {
                    Long setCount = rs.getLong("set_count"); // NON-NLS
                    String setName = rs.getString("set_name"); // NON-NLS
                    setCounts.put(setName, setCount);
                } catch (SQLException ex) {
                    // Log message fixed: it previously referred to unrelated columns
                    logger.log(Level.WARNING, "Unable to get set_name or set_count from result set", ex); // NON-NLS
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "Failed to get next result for interesting item set names", ex); // NON-NLS
        }
    }

    /**
     * Gets the counts for each interesting items set
     *
     * @return A map from each set name to the number of items in it
     */
    public Map<String, Long> getSetCountMap() {
        return setCounts;
    }
}
}
| Support pools in portable cases
| Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java | Support pools in portable cases |
|
Java | apache-2.0 | 2a082b599b6819882667042da3fe8ac9f6b9b9cd | 0 | dashorst/wicket,apache/wicket,zwsong/wicket,AlienQueen/wicket,aldaris/wicket,dashorst/wicket,apache/wicket,selckin/wicket,dashorst/wicket,aldaris/wicket,bitstorm/wicket,klopfdreh/wicket,freiheit-com/wicket,astrapi69/wicket,AlienQueen/wicket,Servoy/wicket,klopfdreh/wicket,Servoy/wicket,zwsong/wicket,dashorst/wicket,freiheit-com/wicket,AlienQueen/wicket,topicusonderwijs/wicket,dashorst/wicket,selckin/wicket,martin-g/wicket-osgi,topicusonderwijs/wicket,apache/wicket,mosoft521/wicket,zwsong/wicket,martin-g/wicket-osgi,bitstorm/wicket,freiheit-com/wicket,selckin/wicket,apache/wicket,mosoft521/wicket,selckin/wicket,AlienQueen/wicket,topicusonderwijs/wicket,freiheit-com/wicket,zwsong/wicket,selckin/wicket,Servoy/wicket,mosoft521/wicket,bitstorm/wicket,martin-g/wicket-osgi,mafulafunk/wicket,astrapi69/wicket,aldaris/wicket,aldaris/wicket,astrapi69/wicket,Servoy/wicket,apache/wicket,klopfdreh/wicket,klopfdreh/wicket,Servoy/wicket,bitstorm/wicket,bitstorm/wicket,astrapi69/wicket,aldaris/wicket,klopfdreh/wicket,mosoft521/wicket,mafulafunk/wicket,topicusonderwijs/wicket,mafulafunk/wicket,AlienQueen/wicket,freiheit-com/wicket,topicusonderwijs/wicket,mosoft521/wicket | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wicket.util.file;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import wicket.util.io.Streams;
import wicket.util.time.Time;
import wicket.util.watch.IModifiable;
/**
 * Simple extension of File that adds an implementation of IModifiable for
 * files. This allows the ModificationWatcher class to watch files for
 * modification. The IModifiable.lastModifiedTime() method also returns a Time
 * object with a more convenient API than either Date or a value in
 * milliseconds.
 *
 * @author Jonathan Locke
 */
public class File extends java.io.File implements IModifiable
{
	private static final long serialVersionUID = 1L;

	/**
	 * Constructor.
	 *
	 * @param parent
	 *            parent
	 * @param child
	 *            child
	 */
	public File(final File parent, final String child)
	{
		super(parent, child);
	}

	/**
	 * Constructor.
	 *
	 * @param parent
	 *            parent folder as a java.io.File
	 * @param child
	 *            child path relative to the parent
	 */
	public File(final java.io.File parent, final String child)
	{
		super(parent, child);
	}

	/**
	 * Construct.
	 *
	 * @param file
	 *            File from java.io package
	 */
	public File(final java.io.File file)
	{
		super(file.getAbsolutePath());
	}

	/**
	 * Constructor.
	 *
	 * @param pathname
	 *            path name
	 */
	public File(final String pathname)
	{
		super(pathname);
	}

	/**
	 * Constructor.
	 *
	 * @param parent
	 *            parent
	 * @param child
	 *            child
	 */
	public File(final String parent, final String child)
	{
		super(parent, child);
	}

	/**
	 * Constructor.
	 *
	 * @param uri
	 *            file uri
	 */
	public File(final URI uri)
	{
		super(uri);
	}

	/**
	 * @return File extension (whatever is after the last '.' in the file name),
	 *         or null if the name contains no '.'
	 */
	public String getExtension()
	{
		final int lastDot = getName().lastIndexOf('.');
		if (lastDot >= 0)
		{
			return getName().substring(lastDot + 1);
		}
		return null;
	}

	/**
	 * @return Parent folder
	 */
	public Folder getParentFolder()
	{
		return new Folder(getParent());
	}

	/**
	 * Returns a Time object representing the most recent time this file was
	 * modified.
	 *
	 * @return This file's lastModified() value as a Time object
	 */
	public final Time lastModifiedTime()
	{
		return Time.milliseconds(lastModified());
	}

	/**
	 * Reads the entire file contents into a String.
	 *
	 * @return String read from this file
	 * @throws IOException
	 */
	public final String readString() throws IOException
	{
		final InputStream in = new FileInputStream(this);
		try
		{
			return Streams.readString(in);
		}
		finally
		{
			in.close();
		}
	}

	/**
	 * @return True if the file was removed
	 * @see java.io.File#delete()
	 */
	public boolean remove()
	{
		return Files.remove(this);
	}

	/**
	 * Force contents of file to physical storage.
	 *
	 * NOTE(review): this syncs via a read-only FileInputStream descriptor;
	 * confirm that this is sufficient to flush pending writes on all
	 * supported platforms.
	 *
	 * @throws IOException
	 */
	public void sync() throws IOException
	{
		final FileInputStream in = new FileInputStream(this);
		try
		{
			in.getFD().sync();
		}
		finally
		{
			in.close();
		}
	}

	/**
	 * Writes the given file to this one
	 *
	 * @param file
	 *            The file to copy
	 * @throws IOException
	 */
	public final void write(final File file) throws IOException
	{
		final InputStream in = new BufferedInputStream(new FileInputStream(file));
		try
		{
			write(in);
		}
		finally
		{
			in.close();
		}
	}

	/**
	 * Writes the given input stream to this file
	 *
	 * @param input
	 *            The input
	 * @return Number of bytes written
	 * @throws IOException
	 */
	public final int write(final InputStream input) throws IOException
	{
		return Files.writeTo(this, input);
	}

	/**
	 * Write the given string to this file.
	 *
	 * NOTE(review): FileWriter uses the platform default encoding; confirm
	 * callers do not depend on a specific charset.
	 *
	 * @param string
	 *            The string to write
	 * @throws IOException
	 */
	public final void write(final String string) throws IOException
	{
		final FileWriter out = new FileWriter(this);
		try
		{
			out.write(string);
		}
		finally
		{
			out.close();
		}
	}
}
| wicket/src/main/java/wicket/util/file/File.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wicket.util.file;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import wicket.util.io.Streams;
import wicket.util.time.Time;
import wicket.util.watch.IModifiable;
/**
 * Simple extension of File that adds an implementation of IModifiable for
 * files. This allows the ModificationWatcher class to watch files for
 * modification. The IModifiable.lastModifiedTime() method also returns a Time
 * object with a more convenient API than either Date or a value in
 * milliseconds.
 *
 * @author Jonathan Locke
 */
public class File extends java.io.File implements IModifiable
{
	private static final long serialVersionUID = 1L;

	/**
	 * Constructor.
	 *
	 * @param parent
	 *            parent
	 * @param child
	 *            child
	 */
	public File(final File parent, final String child)
	{
		super(parent, child);
	}

	/**
	 * Construct.
	 *
	 * @param file
	 *            File from java.io package
	 */
	public File(final java.io.File file)
	{
		super(file.getAbsolutePath());
	}

	/**
	 * Constructor.
	 *
	 * @param pathname
	 *            path name
	 */
	public File(final String pathname)
	{
		super(pathname);
	}

	/**
	 * Constructor.
	 *
	 * @param parent
	 *            parent
	 * @param child
	 *            child
	 */
	public File(final String parent, final String child)
	{
		super(parent, child);
	}

	/**
	 * Constructor.
	 *
	 * @param uri
	 *            file uri
	 */
	public File(final URI uri)
	{
		super(uri);
	}

	/**
	 * @return File extension (whatever is after the last '.' in the file name),
	 *         or null if the name contains no '.'
	 */
	public String getExtension()
	{
		final int lastDot = getName().lastIndexOf('.');
		if (lastDot >= 0)
		{
			return getName().substring(lastDot + 1);
		}
		return null;
	}

	/**
	 * @return Parent folder
	 */
	public Folder getParentFolder()
	{
		return new Folder(getParent());
	}

	/**
	 * Returns a Time object representing the most recent time this file was
	 * modified.
	 *
	 * @return This file's lastModified() value as a Time object
	 */
	public final Time lastModifiedTime()
	{
		return Time.milliseconds(lastModified());
	}

	/**
	 * Reads the entire file contents into a String.
	 *
	 * @return String read from this file
	 * @throws IOException
	 */
	public final String readString() throws IOException
	{
		final InputStream in = new FileInputStream(this);
		try
		{
			return Streams.readString(in);
		}
		finally
		{
			in.close();
		}
	}

	/**
	 * @return True if the file was removed
	 * @see java.io.File#delete()
	 */
	public boolean remove()
	{
		return Files.remove(this);
	}

	/**
	 * Force contents of file to physical storage.
	 *
	 * NOTE(review): this syncs via a read-only FileInputStream descriptor;
	 * confirm that this is sufficient to flush pending writes on all
	 * supported platforms.
	 *
	 * @throws IOException
	 */
	public void sync() throws IOException
	{
		final FileInputStream in = new FileInputStream(this);
		try
		{
			in.getFD().sync();
		}
		finally
		{
			in.close();
		}
	}

	/**
	 * Writes the given file to this one
	 *
	 * @param file
	 *            The file to copy
	 * @throws IOException
	 */
	public final void write(final File file) throws IOException
	{
		final InputStream in = new BufferedInputStream(new FileInputStream(file));
		try
		{
			write(in);
		}
		finally
		{
			in.close();
		}
	}

	/**
	 * Writes the given input stream to this file
	 *
	 * @param input
	 *            The input
	 * @return Number of bytes written
	 * @throws IOException
	 */
	public final int write(final InputStream input) throws IOException
	{
		return Files.writeTo(this, input);
	}

	/**
	 * Write the given string to this file.
	 *
	 * NOTE(review): FileWriter uses the platform default encoding; confirm
	 * callers do not depend on a specific charset.
	 *
	 * @param string
	 *            The string to write
	 * @throws IOException
	 */
	public final void write(final String string) throws IOException
	{
		final FileWriter out = new FileWriter(this);
		try
		{
			out.write(string);
		}
		finally
		{
			out.close();
		}
	}
}
| Adding constructor File(java.io.File, String)
git-svn-id: 6d6ade8e88b1292e17cba3559b7335a947e495e0@513837 13f79535-47bb-0310-9956-ffa450edef68
| wicket/src/main/java/wicket/util/file/File.java | Adding constructor File(java.io.File, String) |
|
Java | bsd-3-clause | baaa09e6a026e581e8a279ad344404042e625198 | 0 | vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro | /* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.edit;
import java.io.IOException;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.shared.Lock;
import edu.cornell.mannlib.vedit.beans.LoginStatusBean;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DependentResourceDeleteJena;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import edu.cornell.mannlib.vitro.webapp.edit.n3editing.EditN3Utils;
/**
 * Servlet that accepts raw RDF additions and retractions (in several Jena
 * serializations) via POST and applies them to the site's main Jena model.
 *
 * GET renders a simple editing form (primitiveRdfEdit.ftl). POST parses the
 * "additions" and "retractions" request parameters as RDF, computes the net
 * delta plus any dependent-resource deletions, and writes the change while
 * holding the model's write lock, surrounded by EditEvent notifications.
 *
 * Fixes over the previous revision: parseRdfParam now catches Exception
 * instead of Error (Jena parse failures are runtime exceptions, not Errors)
 * and preserves the cause; processChanges preserves the original Throwable
 * as the cause and acquires the lock before the try so the finally block
 * cannot NPE on a lock that was never obtained; a missing request parameter
 * is treated as an empty change set instead of triggering a
 * NullPointerException.
 */
public class PrimitiveRdfEdit extends FreemarkerHttpServlet{

    private static final long serialVersionUID = 1L;

    @Override
    protected String getTitle(String siteName) {
        // Fixed page title; siteName is not used here.
        return "RDF edit";
    }

    @Override
    protected int requiredLoginLevel() {
        // Editing raw RDF requires at least editor-level privileges.
        return LoginStatusBean.EDITOR;
    }

    @Override
    protected ResponseValues processRequest(VitroRequest vreq) {
        // GET requests simply render the RDF editing form.
        return new TemplateResponseValues("primitiveRdfEdit.ftl");
    }

    /**
     * Handles POSTed RDF edits: rejects the request with 401 when the caller
     * is not logged in, otherwise delegates to processRequest(vreq, response).
     */
    @Override
    public void doPost(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        VitroRequest vreq = new VitroRequest(request);

        if( !LoginStatusBean.getBean(request).isLoggedIn()){
            doError(response,"You must be logged in to use this servlet.",HttpStatus.SC_UNAUTHORIZED);
            return;
        }

        // NOTE(review): the policy/identifier checks below are disabled;
        // requests are currently authorized by login status alone.
//        PolicyIface policy = RequestPolicyList.getPolicies( request );
//
//        if( policy == null || ( policy instanceof PolicyList && ((PolicyList)policy).size() == 0 )){
//            policy = ServletPolicyList.getPolicies( getServletContext() );
//            if( policy == null || ( policy instanceof PolicyList && ((PolicyList)policy).size() == 0 )){
//                log.debug("No policy found in request at " + RequestPolicyList.POLICY_LIST);
//                doError(response, "no policy found.",500);
//                return;
//            }
//        }
//
//        IdentifierBundle ids = (IdentifierBundle)ServletIdentifierBundleFactory
//            .getIdBundleForRequest(request,request.getSession(false),getServletContext());
//
//        if( ids == null ){
//            log.error("No IdentifierBundle objects for request");
//            doError(response,"no identifiers found",500);
//            return;
//        }

        processRequest(vreq, response);
    }

    /**
     * Parses the request parameters, performs permission checks, and applies
     * the requested additions/retractions to the write model. Errors are
     * reported directly on the response via doError().
     */
    protected void processRequest(VitroRequest vreq, HttpServletResponse response) {

        /* Predefined values for RdfFormat are "RDF/XML",
         * "N-TRIPLE", "TURTLE" (or "TTL") and "N3". null represents
         * the default language, "RDF/XML". "RDF/XML-ABBREV" is a synonym for "RDF/XML" */
        String format = vreq.getParameter("RdfFormat");
        if( format == null )
            format = "N3";
        if ( ! ("N-TRIPLE".equals(format) || "TURTLE".equals(format) || "TTL".equals(format)
                || "N3".equals(format)|| "RDF/XML-ABBREV".equals(format) || "RDF/XML".equals(format) )){
            doError(response,"RdfFormat was not recognized.",HttpStatus.SC_INTERNAL_SERVER_ERROR);
            return;
        }

        //parse RDF
        Set<Model> additions = null;
        try {
            additions = parseRdfParam(vreq.getParameterValues("additions"),format);
        } catch (Exception e) {
            doError(response,"Error reading RDF, set log level to debug for this class to get error messages in the server logs.",HttpStatus.SC_BAD_REQUEST);
            return;
        }

        Set<Model> retractions = null;
        try {
            retractions = parseRdfParam(vreq.getParameterValues("retractions"),format);
        } catch (Exception e) {
            doError(response,"Error reading RDF, set log level to debug for this class to get error messages in the server logs.",HttpStatus.SC_BAD_REQUEST);
            return;
        }

        //check permissions
        //TODO: (bdc34)This is not yet implemented, must check the IDs against the policies for permissons before doing an edit!
        // rjy7 put policy check in separate method so subclasses can inherit
        boolean hasPermission = true;

        if( !hasPermission ){
            //if not okay, send error message
            doError(response,"Insufficent permissions.",HttpStatus.SC_UNAUTHORIZED);
            return;
        }

        ServletContext sc = getServletContext();
        String editorUri = EditN3Utils.getEditorUri(vreq, vreq.getSession(false), sc);
        try {
            processChanges( additions, retractions, getWriteModel(vreq),getQueryModel(vreq), editorUri);
        } catch (Exception e) {
            doError(response,e.getMessage(),HttpStatus.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Convenience overload: merges each set of models into a single model and
     * delegates to processChanges(Model, Model, ...).
     */
    protected void processChanges(Set<Model> additions, Set<Model> retractions, OntModel writeModel, OntModel queryModel, String editorURI ) throws Exception{
        Model a = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
        for(Model m : additions)
            a.add(m);
        Model r = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
        for(Model m : retractions)
            r.add(m);
        processChanges(a,r,writeModel,queryModel,editorURI);
    }

    /**
     * Applies the net change (additions minus retractions, plus any
     * dependent-resource deletions) to the write model while holding its
     * write lock. EditEvent notifications bracket the change.
     *
     * @throws Exception if the model update fails; the original failure is
     *         attached as the cause
     */
    protected void processChanges(Model additions, Model retractions, OntModel writeModel, OntModel queryModel, String editorURI ) throws Exception{

        /*
         * Do a diff on the additions and retractions so that only the real
         * delta is applied to the jenaOntModel.
         */
        Model assertionsAdded = additions.difference( retractions );
        Model assertionsRetracted = retractions.difference( additions );

        // Also retract any resources that become orphaned by this change
        Model depResRetractions =
            DependentResourceDeleteJena
            .getDependentResourceDeleteForChange(assertionsAdded, assertionsRetracted, queryModel);
        assertionsRetracted.add( depResRetractions );

        // Acquire the lock before entering the try block so the finally clause
        // never runs against a lock that was not successfully obtained.
        Lock lock = writeModel.getLock();
        lock.enterCriticalSection(Lock.WRITE);
        try{
            writeModel.getBaseModel().notifyEvent(new EditEvent(editorURI,true));
            writeModel.add( assertionsAdded );
            writeModel.remove( assertionsRetracted );
        }catch(Throwable t){
            // Preserve the original throwable as the cause instead of
            // flattening it to just its message.
            throw new Exception("Error while modifying model \n" + t.getMessage(), t);
        }finally{
            writeModel.getBaseModel().notifyEvent(new EditEvent(editorURI,false));
            lock.leaveCriticalSection();
        }
    }

    /**
     * Convert the values from a parameters into RDF models.
     *
     * @param parameters - the result of request.getParameters(String); may be
     *        null when the parameter was not submitted, which yields an empty set
     * @param format - a valid format string for Jena's Model.read()
     * @return one model per parameter value
     * @throws Exception if any value cannot be parsed as RDF; the parse
     *         failure is attached as the cause
     */
    protected Set<Model> parseRdfParam(String[] parameters, String format) throws Exception{
        Set<Model> models = new HashSet<Model>();
        if( parameters == null ) {
            // No values submitted for this parameter; treat as an empty change set.
            return models;
        }
        for( String param : parameters){
            try{
                StringReader reader = new StringReader(param);
                Model model = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
                model.read(reader, null, format);
                models.add(model);
            }catch(Exception ex){
                // Jena parse failures are runtime exceptions, not Errors;
                // log with the cause and rethrow with the cause preserved.
                log.error("Error reading RDF as " + format + " in " + param, ex);
                throw new Exception("Error reading RDF, set log level to debug for this class to get error messages in the server logs.", ex);
            }
        }
        return models;
    }

    /**
     * Writes the given error message to the response with the given HTTP status.
     */
    protected void doError(HttpServletResponse response, String errorMsg, int httpstatus){
        response.setStatus(httpstatus);
        try {
            response.getWriter().write(errorMsg);
        } catch (IOException e) {
            log.debug("IO exception during output",e );
        }
    }

    /**
     * Returns the session-scoped OntModel when one exists, otherwise the
     * application-wide model from the servlet context.
     */
    protected OntModel getWriteModel(HttpServletRequest request){
        HttpSession session = request.getSession(false);
        if( session == null || session.getAttribute("jenaOntModel") == null )
            return (OntModel)getServletContext().getAttribute("jenaOntModel");
        else
            return (OntModel)session.getAttribute("jenaOntModel");
    }

    /**
     * Queries run against the same model that is written to.
     */
    protected OntModel getQueryModel(HttpServletRequest request){
        return getWriteModel(request);
    }

    Log log = LogFactory.getLog(PrimitiveRdfEdit.class.getName());

    /**
     * @return true when the requester is logged in.
     */
    static public boolean checkLoginStatus(HttpServletRequest request){
        return LoginStatusBean.getBean(request).isLoggedIn();
    }
}
| webapp/src/edu/cornell/mannlib/vitro/webapp/controller/edit/PrimitiveRdfEdit.java | /* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.edit;
import java.io.IOException;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.shared.Lock;
import edu.cornell.mannlib.vedit.beans.LoginStatusBean;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder.Route;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DependentResourceDeleteJena;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import edu.cornell.mannlib.vitro.webapp.edit.n3editing.EditN3Utils;
public class PrimitiveRdfEdit extends FreemarkerHttpServlet{
private static final long serialVersionUID = 1L;
@Override
protected String getTitle(String siteName) {
    // Fixed page title; the siteName argument from the base class is unused.
    return "RDF edit";
}
@Override
protected int requiredLoginLevel() {
    // Editing raw RDF requires at least editor-level privileges.
    return LoginStatusBean.EDITOR;
}
@Override
protected ResponseValues processRequest(VitroRequest vreq) {
    // GET requests simply render the RDF editing form template.
    return new TemplateResponseValues("primitiveRdfEdit.ftl");
}
/**
 * Handles POSTed RDF edits. Rejects the request with 401 when the caller is
 * not logged in; otherwise delegates to processRequest(vreq, response).
 *
 * @param request the servlet request carrying the RDF parameters
 * @param response the servlet response used for status/error reporting
 */
@Override
public void doPost(HttpServletRequest request,
        HttpServletResponse response) throws ServletException, IOException {
    VitroRequest vreq = new VitroRequest(request);

    boolean loggedIn = checkLoginStatus(request);
    if( !loggedIn){
        doError(response,"You must be logged in to use this servlet.",HttpStatus.SC_UNAUTHORIZED);
        return;
    }

    // NOTE(review): the policy/identifier checks below are disabled; requests
    // are currently authorized by login status alone.
//        PolicyIface policy = RequestPolicyList.getPolicies( request );
//
//        if( policy == null || ( policy instanceof PolicyList && ((PolicyList)policy).size() == 0 )){
//            policy = ServletPolicyList.getPolicies( getServletContext() );
//            if( policy == null || ( policy instanceof PolicyList && ((PolicyList)policy).size() == 0 )){
//                log.debug("No policy found in request at " + RequestPolicyList.POLICY_LIST);
//                doError(response, "no policy found.",500);
//                return;
//            }
//        }
//
//        IdentifierBundle ids = (IdentifierBundle)ServletIdentifierBundleFactory
//            .getIdBundleForRequest(request,request.getSession(false),getServletContext());
//
//        if( ids == null ){
//            log.error("No IdentifierBundle objects for request");
//            doError(response,"no identifiers found",500);
//            return;
//        }

    processRequest(vreq, response);
}
protected void processRequest(VitroRequest vreq, HttpServletResponse response) {
//Test error case
/*
if (1==1) {
doError(response, "Test error", 500);
return;
} */
/* Predefined values for RdfFormat are "RDF/XML",
* "N-TRIPLE", "TURTLE" (or "TTL") and "N3". null represents
* the default language, "RDF/XML". "RDF/XML-ABBREV" is a synonym for "RDF/XML" */
String format = vreq.getParameter("RdfFormat");
if( format == null )
format = "N3";
if ( ! ("N-TRIPLE".equals(format) || "TURTLE".equals(format) || "TTL".equals(format)
|| "N3".equals(format)|| "RDF/XML-ABBREV".equals(format) || "RDF/XML".equals(format) )){
doError(response,"RdfFormat was not recognized.",500);
return;
}
//parse RDF
Set<Model> additions= null;
try {
additions = parseRdfParam(vreq.getParameterValues("additions"),format);
} catch (Exception e) {
doError(response,"Error reading RDF, set log level to debug for this class to get error messages in the server logs.",HttpStatus.SC_BAD_REQUEST);
return;
}
Set<Model> retractions = null;
try {
retractions = parseRdfParam(vreq.getParameterValues("retractions"),format);
} catch (Exception e) {
doError(response,"Error reading RDF, set log level to debug for this class to get error messages in the server logs.",HttpStatus.SC_BAD_REQUEST);
return;
}
//check permissions
//TODO: (bdc34)This is not yet implemented, must check the IDs against the policies for permissons before doing an edit!
// rjy7 put policy check in separate method so subclasses can inherit
boolean hasPermission = true;
if( !hasPermission ){
//if not okay, send error message
doError(response,"Insufficent permissions.",HttpStatus.SC_UNAUTHORIZED);
return;
}
ServletContext sc = getServletContext();
String editorUri = EditN3Utils.getEditorUri(vreq, vreq.getSession(false), sc);
try {
processChanges( additions, retractions, getWriteModel(vreq),getQueryModel(vreq), editorUri);
} catch (Exception e) {
doError(response,e.getMessage(),HttpStatus.SC_INTERNAL_SERVER_ERROR);
}
}
protected void processChanges(Set<Model> additions, Set<Model> retractions, OntModel writeModel, OntModel queryModel, String editorURI ) throws Exception{
Model a = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
for(Model m : additions)
a.add(m);
Model r = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
for(Model m : retractions)
r.add(m);
processChanges(a,r,writeModel,queryModel,editorURI);
}
protected void processChanges(Model additions, Model retractions, OntModel writeModel, OntModel queryModel, String editorURI ) throws Exception{
/*
* Do a diff on the additions and retractions and then only add the delta to the jenaOntModel.
*/
Model assertionsAdded = additions.difference( retractions );
Model assertionsRetracted = retractions.difference( additions );
Model depResRetractions =
DependentResourceDeleteJena
.getDependentResourceDeleteForChange(assertionsAdded, assertionsRetracted, queryModel);
assertionsRetracted.add( depResRetractions );
Lock lock = null;
try{
lock = writeModel.getLock();
lock.enterCriticalSection(Lock.WRITE);
writeModel.getBaseModel().notifyEvent(new EditEvent(editorURI,true));
writeModel.add( assertionsAdded );
writeModel.remove( assertionsRetracted );
}catch(Throwable t){
throw new Exception("Error while modifying model \n" + t.getMessage());
}finally{
writeModel.getBaseModel().notifyEvent(new EditEvent(editorURI,false));
lock.leaveCriticalSection();
}
}
/**
* Convert the values from a parameters into RDF models.
* @param parameters - the result of request.getParameters(String)
* @param format - a valid format string for Jena's Model.read()
* @return
* @throws Exception
*/
protected Set<Model> parseRdfParam(String[] parameters, String format) throws Exception{
Set<Model> models = new HashSet<Model>();
for( String param : parameters){
try{
StringReader reader = new StringReader(param);
Model model = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
model.read(reader, null, format);
models.add(model);
}catch(Error ex){
log.error("Error reading RDF as " + format + " in " + param);
throw new Exception("Error reading RDF, set log level to debug for this class to get error messages in the sever logs.");
}
}
return models;
}
protected void doError(HttpServletResponse response, String errorMsg, int httpstatus){
response.setStatus(httpstatus);
try {
response.getWriter().write(errorMsg);
} catch (IOException e) {
log.debug("IO exception during output",e );
}
}
protected OntModel getWriteModel(HttpServletRequest request){
HttpSession session = request.getSession(false);
if( session == null || session.getAttribute("jenaOntModel") == null )
return (OntModel)getServletContext().getAttribute("jenaOntModel");
else
return (OntModel)session.getAttribute("jenaOntModel");
}
protected OntModel getQueryModel(HttpServletRequest request){
return getWriteModel(request);
}
Log log = LogFactory.getLog(PrimitiveRdfEdit.class.getName());
static public boolean checkLoginStatus(HttpServletRequest request){
return LoginStatusBean.getBean(request).isLoggedIn();
}
}
| NIHVIVO-1363 restrict the PrimitiveRdfEdit servlet and its sub-classes to users who are at least logged in.
| webapp/src/edu/cornell/mannlib/vitro/webapp/controller/edit/PrimitiveRdfEdit.java | NIHVIVO-1363 restrict the PrimitiveRdfEdit servlet and its sub-classes to users who are at least logged in. |
|
Java | bsd-3-clause | 0aeb527d8c730c042a7774d94cca1197b03eb7be | 0 | pughlab/tracker,pughlab/tracker,pughlab/tracker,pughlab/tracker,pughlab/tracker | package ca.uhnresearch.pughlab.tracker.dao.impl;
import static org.hamcrest.Matchers.containsString;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import static org.easymock.EasyMock.*;
import org.hamcrest.Matchers;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.Assert;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jdbc.query.QueryDslJdbcTemplate;
import org.springframework.data.jdbc.query.SqlInsertWithKeyCallback;
import org.springframework.data.jdbc.query.SqlUpdateCallback;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mysema.query.sql.RelationalPath;
import com.mysema.query.sql.SQLQuery;
import com.mysema.query.types.Expression;
import ca.uhnresearch.pughlab.tracker.dao.CasePager;
import ca.uhnresearch.pughlab.tracker.dao.InvalidValueException;
import ca.uhnresearch.pughlab.tracker.dao.NotFoundException;
import ca.uhnresearch.pughlab.tracker.dao.RepositoryException;
import ca.uhnresearch.pughlab.tracker.dao.StudyCaseQuery;
import ca.uhnresearch.pughlab.tracker.domain.QAuditLog;
import ca.uhnresearch.pughlab.tracker.domain.QCases;
import ca.uhnresearch.pughlab.tracker.dto.Attributes;
import ca.uhnresearch.pughlab.tracker.dto.AuditLogRecord;
import ca.uhnresearch.pughlab.tracker.dto.Cases;
import ca.uhnresearch.pughlab.tracker.dto.Study;
import ca.uhnresearch.pughlab.tracker.dto.View;
import ca.uhnresearch.pughlab.tracker.dto.ViewAttributes;
/**
 * Integration tests for {@link StudyRepositoryImpl}, run against the seeded
 * test database configured by testContextDatabase.xml. Every test is
 * transactional and rolled back, so the seed data is never modified.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:**/testContextDatabase.xml" })
public class StudyRepositoryImplTest {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @SuppressWarnings("unused")
    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Repository under test, injected from the Spring test context.
    @Autowired
    private StudyRepositoryImpl studyRepository;

    // Used to verify that repository writes leave audit log entries behind.
    @Autowired
    private AuditLogRepositoryImpl auditLogRepository;

    // Factories for building JSON attribute values in the write tests.
    private JsonNodeFactory jsonNodeFactory = JsonNodeFactory.instance;
    private static ObjectMapper objectMapper = new ObjectMapper();

    /** Sanity check: the Spring context wired in the repository. */
    @Test
    public void testWiring() {
        Assert.assertNotNull(studyRepository);
    }
    /** A seeded study can be looked up by name. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudy() {
        Study s = studyRepository.getStudy("DEMO");
        Assert.assertNotNull(s);
        Assert.assertEquals("DEMO", s.getName());
    }

    /** The study's JSON options round-trip, including nested state labels. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudyOptions() {
        Study s = studyRepository.getStudy("DEMO");
        Assert.assertNotNull(s);
        Assert.assertNotNull(s.getOptions());
        Assert.assertTrue(s.getOptions().has("stateLabels"));
        Assert.assertTrue(s.getOptions().get("stateLabels").isObject());
        Assert.assertTrue(s.getOptions().get("stateLabels").has("pending"));
        Assert.assertTrue(s.getOptions().get("stateLabels").get("pending").isTextual());
        Assert.assertEquals("label1", s.getOptions().get("stateLabels").get("pending").asText());
    }
    /** Saving a study with no id inserts it and assigns a generated id. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSaveStudyNew() {
        Study s = new Study();
        s.setName("TEST");
        s.setDescription("A test study");
        studyRepository.saveStudy(s);
        Assert.assertNotNull(s);
        Assert.assertNotNull(s.getId());

        // Re-read through the repository to confirm the insert persisted.
        Study second = studyRepository.getStudy("TEST");
        Assert.assertNotNull(second);
        Assert.assertEquals("TEST", second.getName());
        Assert.assertEquals("A test study", second.getDescription());
    }

    /** Saving an existing study updates it in place, keeping the same id. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSaveStudyUpdate() {
        Study s = studyRepository.getStudy("DEMO");
        s.setDescription("Another test");
        Study result = studyRepository.saveStudy(s);
        Assert.assertNotNull(result);
        Assert.assertNotNull(result.getId());
        Assert.assertEquals(result.getId(), s.getId());

        Study second = studyRepository.getStudy("DEMO");
        Assert.assertNotNull(second);
        Assert.assertEquals("Another test", second.getDescription());
    }
@Test
@Transactional
@Rollback(true)
public void testGetMissingStudy() {
Study s = studyRepository.getStudy("DEMOX");
Assert.assertNull(s);
}
    /** The seed data contains exactly three studies. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudies() {
        List<Study> list = studyRepository.getAllStudies();
        Assert.assertNotNull(list);
        Assert.assertEquals(3, list.size());
    }

    /** The DEMO study carries three views. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudyViews() {
        Study study = studyRepository.getStudy("DEMO");
        List<View> list = studyRepository.getStudyViews(study);
        Assert.assertNotNull(list);
        Assert.assertEquals(3, list.size());
    }

    /** A view can be looked up by study and name. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudyView() {
        Study study = studyRepository.getStudy("DEMO");
        View v = studyRepository.getStudyView(study, "complete");
        Assert.assertNotNull(v);
        Assert.assertEquals("complete", v.getName());
    }

    /** View options JSON round-trips, including the seeded "rows" array. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudyViewOptions() {
        Study study = studyRepository.getStudy("DEMO");
        View v = studyRepository.getStudyView(study, "secondary");
        Assert.assertNotNull(v);
        Assert.assertEquals("secondary", v.getName());
        Assert.assertNotNull(v.getOptions());
        Assert.assertNotNull(v.getOptions().get("rows"));
        Assert.assertTrue(v.getOptions().get("rows").isArray());
        Assert.assertEquals(1, v.getOptions().get("rows").size());
        Assert.assertNotNull(v.getOptions().get("rows").get(0));
        Assert.assertTrue(v.getOptions().get("rows").get(0).isObject());
        Assert.assertEquals("study", v.getOptions().get("rows").get(0).get("attribute").asText());
    }
    /** Writing new view options persists them and they read back intact. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSetStudyViewOptions() {
        Study study = studyRepository.getStudy("DEMO");
        View v = studyRepository.getStudyView(study, "track");
        Assert.assertNotNull(v);
        Assert.assertEquals("track", v.getName());

        // Build a minimal options payload: one row descriptor.
        ObjectNode viewOptions = objectMapper.createObjectNode();
        ObjectNode viewOptionDescriptor = objectMapper.createObjectNode();
        ArrayNode viewArray = objectMapper.createArrayNode();
        viewOptionDescriptor.put("attribute", "dateEntered");
        viewOptionDescriptor.put("value", "test");
        viewArray.add(viewOptionDescriptor);
        viewOptions.set("rows", viewArray);
        v.setOptions(viewOptions);

        try {
            studyRepository.setStudyView(study, v);
        } catch (RepositoryException e) {
            Assert.fail();
        }

        // Re-read the view and confirm the options were stored.
        View modifiedView = studyRepository.getStudyView(study, "track");
        Assert.assertNotNull(modifiedView.getOptions());
        Assert.assertNotNull(modifiedView.getOptions().get("rows"));
        Assert.assertTrue(modifiedView.getOptions().get("rows").isArray());
        Assert.assertEquals(1, modifiedView.getOptions().get("rows").size());
        Assert.assertNotNull(modifiedView.getOptions().get("rows").get(0));
        Assert.assertTrue(modifiedView.getOptions().get("rows").get(0).isObject());
        Assert.assertEquals("dateEntered", modifiedView.getOptions().get("rows").get(0).get("attribute").asText());
        Assert.assertEquals("test", modifiedView.getOptions().get("rows").get(0).get("value").asText());
    }

    /** Updating a view that belongs to a different study is rejected. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSetStudyViewOptionsInvalidView() throws RepositoryException {
        Study study = studyRepository.getStudy("DEMO");
        View v = studyRepository.getStudyView(study, "track");
        Assert.assertNotNull(v);
        Assert.assertEquals("track", v.getName());

        ObjectNode viewOptions = objectMapper.createObjectNode();
        ObjectNode viewOptionDescriptor = objectMapper.createObjectNode();
        ArrayNode viewArray = objectMapper.createArrayNode();
        viewOptionDescriptor.put("attribute", "dateEntered");
        viewOptionDescriptor.put("value", "test");
        viewArray.add(viewOptionDescriptor);
        viewOptions.set("rows", viewArray);
        v.setOptions(viewOptions);

        // Point the view at a study id that does not match.
        v.setStudyId(100);

        thrown.expect(NotFoundException.class);
        thrown.expectMessage(containsString("Can't update view for a different study"));

        studyRepository.setStudyView(study, v);
    }

    /** Looking up a view name that does not exist yields null. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetMissingStudyView() {
        Study study = studyRepository.getStudy("DEMO");
        View v = studyRepository.getStudyView(study, "completed");
        Assert.assertNull(v);
    }
    /** The DEMO study defines 27 attributes. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetStudyAttributes() {
        Study study = studyRepository.getStudy("DEMO");
        List<Attributes> list = studyRepository.getStudyAttributes(study);
        Assert.assertNotNull(list);
        Assert.assertEquals(27, list.size());
    }

    /** The "complete" view exposes all 27 study attributes. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetViewAttributes() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "complete");
        List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(27, list.size());
    }

    /** The "track" view exposes only a 15-attribute subset. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSmallerGetViewAttributes() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(15, list.size());
    }
    /** Paged case data honours the pager's offset and limit. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetData() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        CasePager pager = new CasePager();
        pager.setOffset(0);
        pager.setLimit(10);
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.applyPager(query, pager);
        List<ObjectNode> list = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(10, list.size());
    }

    /**
     * Regression test for #53 -- checks that only legitimate view attributes are
     * returned.
     */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetDataSecurity() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        CasePager pager = new CasePager();
        pager.setOffset(0);
        pager.setLimit(10);
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.applyPager(query, pager);
        List<ObjectNode> list = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(10, list.size());
        // "mrn" is not part of the track view, so it must not leak through.
        Assert.assertFalse(list.get(0).has("mrn"));
    }

    /** A null limit returns all 20 seeded cases. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetDataNoLimit() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        CasePager pager = new CasePager();
        pager.setOffset(0);
        pager.setLimit(null);
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.applyPager(query, pager);
        List<ObjectNode> list = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(20, list.size());
    }

    /** A null offset behaves as offset zero. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetDataNoOffset() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        CasePager pager = new CasePager();
        pager.setOffset(null);
        pager.setLimit(5);
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.applyPager(query, pager);
        List<ObjectNode> list = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(5, list.size());
    }
    /** Ordering by an attribute with a direction does not break paging. */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetDataOrdered() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        CasePager pager = new CasePager();
        pager.setOffset(0);
        pager.setLimit(5);
        pager.setOrderField("consentDate");
        pager.setOrderDirection(CasePager.OrderDirection.DESC);
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.applyPager(query, pager);
        List<ObjectNode> list = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(list);
        Assert.assertEquals(5, list.size());
    }

    /**
     * Checks that when an attribute filter is applied, only the specified attributes are returned.
     */
    @Test
    @Transactional
    @Rollback(true)
    public void testGetDataFiltered() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        List<ViewAttributes> attributes = studyRepository.getViewAttributes(study, view);
        // Keep only the first three attributes; the rest must be absent.
        List<ViewAttributes> filteredAttributes = attributes.subList(0, 3);

        CasePager pager = new CasePager();
        pager.setOffset(0);
        pager.setLimit(null);
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.applyPager(query, pager);
        List<ObjectNode> list = studyRepository.getCaseData(query, filteredAttributes);
        Assert.assertNotNull(list);
        Assert.assertEquals(20, list.size());
        for(int i = 0; i < 5; i++) {
            Assert.assertFalse(list.get(i).has("physician"));
            Assert.assertFalse(list.get(i).has("tissueSite"));
            Assert.assertFalse(list.get(i).has("specimenAvailable"));
        }
    }
    /** The track view counts all 20 seeded DEMO cases. */
    @Test
    @Transactional
    @Rollback(true)
    public void testRecordCount() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        Long count = studyRepository.getRecordCount(study, view);
        Assert.assertEquals(20, count.intValue());
    }

    /** A single case can be fetched by id within its study. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCase() {
        Study study = studyRepository.getStudy("DEMO");
        Cases caseValue = studyRepository.getStudyCase(study, 1);
        Assert.assertNotNull(caseValue);
        Assert.assertEquals(1, caseValue.getId().intValue());
    }

    /** Fetching a nonexistent case id yields null. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleMissingCase() {
        Study study = studyRepository.getStudy("DEMO");
        Cases caseValue = studyRepository.getStudyCase(study, 100);
        Assert.assertNull(caseValue);
    }

    /** A case id belonging to another study is not visible here. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleFromDifferentStudy() {
        Study study = studyRepository.getStudy("DEMO");
        Cases caseValue = studyRepository.getStudyCase(study, 22);
        Assert.assertNull(caseValue);
    }
    /** Attribute values for a single selected case read back correctly. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseValues() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addStudyCaseSelector(query, 1);
        List<ObjectNode> data = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(data);
        Assert.assertEquals(1, data.size());
        ObjectNode single = data.get(0);

        String date = single.get("dateEntered").asText();
        Assert.assertNotNull(date);
        Assert.assertEquals("2014-08-20", date);
    }

    /** A case with no workflow state reports a JSON null "$state". */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseStateNull() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        List<ObjectNode> dataList = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(dataList);
        Assert.assertEquals(1, dataList.size());
        JsonNode data = dataList.get(0);
        Assert.assertNotNull(data);
        Assert.assertTrue(data.has("$state"));
        Assert.assertTrue(data.get("$state").isNull());
    }

    /** A case with a pending state reports "$state" as the text "pending". */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseStatePending() {
        Study study = studyRepository.getStudy("SECOND");
        View view = studyRepository.getStudyView(study, "complete");
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 21);
        List<ObjectNode> dataList = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(dataList);
        Assert.assertEquals(1, dataList.size());
        JsonNode data = dataList.get(0);
        Assert.assertNotNull(data);
        Assert.assertTrue(data.has("$state"));
        Assert.assertTrue(data.get("$state").isTextual());
        Assert.assertEquals("pending", data.get("$state").asText());
    }
    /** Numeric attribute values come back as JSON numbers. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseNumberValues() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "complete");
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addStudyCaseSelector(query, 1);
        List<ObjectNode> data = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(data);
        Assert.assertEquals(1, data.size());
        ObjectNode single = data.get(0);

        Assert.assertTrue(single.has("numberCores"));
        Double cores = single.get("numberCores").asDouble();
        Assert.assertNotNull(cores);
        // Tolerance-based comparison to avoid floating-point equality issues.
        Assert.assertTrue(Math.abs(cores - 2.0) < 0.00000001);
    }

    /** Per-attribute annotations (e.g. locked) surface under "$notes". */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseValuesNotes() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addStudyCaseSelector(query, 1);
        List<ObjectNode> data = studyRepository.getCaseData(query, view);
        Assert.assertNotNull(data);
        Assert.assertEquals(1, data.size());
        ObjectNode single = data.get(0);

        JsonNode notes = single.get("$notes");
        Assert.assertNotNull(notes);

        // No notes here
        Assert.assertNull(notes.get("specimenAvailable"));

        // Notes here
        JsonNode consentDateNotes = notes.get("consentDate");
        Assert.assertNotNull(consentDateNotes);

        JsonNode consentDateLocked = consentDateNotes.get("locked");
        Assert.assertNotNull(consentDateLocked);
        Assert.assertTrue(consentDateLocked.isBoolean());
        Assert.assertTrue(consentDateLocked.asBoolean());
    }
    /** An attribute filter applies to single-case queries too. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseValuesFiltered() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        List<ViewAttributes> attributes = studyRepository.getViewAttributes(study, view);
        List<ViewAttributes> filteredAttributes = attributes.subList(0, 3);

        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        List<ObjectNode> dataList = studyRepository.getCaseData(query, filteredAttributes);
        Assert.assertEquals(1, dataList.size());
        ObjectNode data = dataList.get(0);

        Assert.assertNotNull(data);
        // Exactly the filtered attributes must be present; no more, no fewer.
        for(ViewAttributes va : attributes) {
            Boolean filtered = filteredAttributes.contains(va);
            Assert.assertEquals("Failed to filter attribute: " + va.getName(),filtered, data.has(va.getName()));
        }
    }
private ObjectNode getCaseAttributeValue(Study study, View view, Integer caseId) {
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.addViewCaseMatcher(query, view);
query = studyRepository.addStudyCaseSelector(query, caseId);
List<ObjectNode> dataList = studyRepository.getCaseData(query, view);
Assert.assertEquals(1, dataList.size());
return dataList.get(0);
}
    /** A stored date attribute reads back as its ISO text value. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseAttributeValues() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        ObjectNode data = getCaseAttributeValue(study, view, 1);
        Assert.assertNotNull(data);
        Assert.assertEquals("2014-08-20", data.get("dateEntered").asText());
    }

    /** An N/A value is encoded as an object with "$notAvailable": true. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseAttributeValuesNotAvailable() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "complete");
        ObjectNode data = getCaseAttributeValue(study, view, 2);
        Assert.assertTrue(data.get("trackerDate").isObject());
        Assert.assertTrue(data.get("trackerDate").has("$notAvailable"));
        Assert.assertEquals("true", data.get("trackerDate").get("$notAvailable").asText());
    }

    /** An attribute with no stored value is simply absent from the record. */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseAttributeMissing() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");
        ObjectNode data = getCaseAttributeValue(study, view, 1);
        Assert.assertNotNull(data);
        Assert.assertFalse(data.has("bloodCollDate"));
    }

    /** The audit log starts out empty for the seeded study. */
    @Test
    @Transactional
    @Rollback(true)
    public void testAuditLog() {
        Study study = studyRepository.getStudy("DEMO");
        CasePager query = new CasePager();
        query.setOffset(0);
        query.setLimit(5);
        List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
        Assert.assertNotNull(auditEntries);
        Assert.assertEquals(0, auditEntries.size());
    }
    /** Audit queries work with neither offset nor limit set. */
    @Test
    @Transactional
    @Rollback(true)
    public void testAuditLogWithNoLimits() {
        Study study = studyRepository.getStudy("DEMO");
        CasePager query = new CasePager();
        query.setOffset(null);
        query.setLimit(null);
        List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
        Assert.assertNotNull(auditEntries);
        Assert.assertEquals(0, auditEntries.size());
    }

    /**
     * An audit record whose eventArgs is malformed JSON ("{") must not crash
     * the reader. Uses an EasyMock QueryDslJdbcTemplate to inject the bad row.
     */
    @Test
    @Transactional
    @Rollback(true)
    public void testAuditLogWithBadData() {
        Study study = studyRepository.getStudy("DEMO");
        CasePager query = new CasePager();
        query.setOffset(null);
        query.setLimit(null);

        List<AuditLogRecord> data = new ArrayList<AuditLogRecord>();
        AuditLogRecord entry = new AuditLogRecord();
        entry.setEventTime(Timestamp.from(Instant.now()));
        entry.setEventArgs("{");
        data.add(entry);

        QueryDslJdbcTemplate originalTemplate = studyRepository.getTemplate();
        QueryDslJdbcTemplate mockTemplate = createMock(QueryDslJdbcTemplate.class);
        expect(mockTemplate.newSqlQuery()).andStubReturn(originalTemplate.newSqlQuery());
        expect(mockTemplate.query(anyObject(SQLQuery.class), anyObject(QAuditLog.class))).andStubReturn(data);
        replay(mockTemplate);
        studyRepository.setTemplate(mockTemplate);

        List<JsonNode> auditEntries = null;
        try {
            auditEntries = auditLogRepository.getAuditData(study, query);
        } finally {
            // Always restore the real template so other tests are unaffected.
            studyRepository.setTemplate(originalTemplate);
        }

        Assert.assertNotNull(auditEntries);
    }

    /** A well-formed eventArgs JSON payload is parsed without error. */
    @Test
    @Transactional
    @Rollback(true)
    public void testAuditLogWithGoodData() {
        Study study = studyRepository.getStudy("DEMO");
        CasePager query = new CasePager();
        query.setOffset(null);
        query.setLimit(null);

        List<AuditLogRecord> data = new ArrayList<AuditLogRecord>();
        AuditLogRecord entry = new AuditLogRecord();
        entry.setEventTime(Timestamp.from(Instant.now()));
        entry.setEventArgs("{\"old\":null,\"value\":100}");
        data.add(entry);

        QueryDslJdbcTemplate originalTemplate = studyRepository.getTemplate();
        QueryDslJdbcTemplate mockTemplate = createMock(QueryDslJdbcTemplate.class);
        expect(mockTemplate.newSqlQuery()).andStubReturn(originalTemplate.newSqlQuery());
        expect(mockTemplate.query(anyObject(SQLQuery.class), anyObject(QAuditLog.class))).andStubReturn(data);
        replay(mockTemplate);
        studyRepository.setTemplate(mockTemplate);

        List<JsonNode> auditEntries = null;
        try {
            auditEntries = auditLogRepository.getAuditData(study, query);
        } finally {
            // Always restore the real template so other tests are unaffected.
            studyRepository.setTemplate(originalTemplate);
        }

        Assert.assertNotNull(auditEntries);
    }
    /**
     * Writing a null over an existing date value persists it and records an
     * audit entry showing the old value and the new null.
     */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseAttributeWriteValueDate() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");

        try {
            StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
            query = studyRepository.addViewCaseMatcher(query, view);
            query = studyRepository.addStudyCaseSelector(query, 1);

            ObjectNode values = jsonNodeFactory.objectNode();
            values.replace("dateEntered", jsonNodeFactory.nullNode());

            studyRepository.setQueryAttributes(query, "stuart", values);
        } catch (RepositoryException e) {
            Assert.fail();
        }

        // Check we now have an audit log entry
        CasePager query = new CasePager();
        query.setOffset(0);
        query.setLimit(5);
        List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
        Assert.assertNotNull(auditEntries);
        Assert.assertEquals(1, auditEntries.size());

        // Poke at the first audit log entry
        JsonNode entry = auditEntries.get(0);
        Assert.assertEquals("stuart", entry.get("eventUser").asText());
        Assert.assertEquals("dateEntered", entry.get("attribute").asText());
        Assert.assertEquals("2014-08-20", entry.get("eventArgs").get("old").asText());
        Assert.assertTrue(entry.get("eventArgs").get("new").isNull());

        // And now, we ought to be able to see the new audit entry in the database, and
        // the value should be correct too. Note that as we have set null, we get back a
        // JSON null, not a Java one.
        ObjectNode data = getCaseAttributeValue(study, view, 1);
        Assert.assertTrue(data.has("dateEntered"));
        Assert.assertTrue(data.get("dateEntered").isNull());
    }

    /**
     * Writing a date where none existed inserts the value and records an audit
     * entry with a null old value.
     */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseAttributeWriteValueDateInsert() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "complete");

        try {
            StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
            query = studyRepository.addViewCaseMatcher(query, view);
            query = studyRepository.addStudyCaseSelector(query, 6);

            ObjectNode values = jsonNodeFactory.objectNode();
            values.replace("procedureDate", jsonNodeFactory.textNode("2014-02-03"));

            studyRepository.setQueryAttributes(query, "stuart", values);
        } catch (RepositoryException e) {
            Assert.fail(e.getMessage());
        }

        // Check we now have an audit log entry
        CasePager query = new CasePager();
        query.setOffset(0);
        query.setLimit(5);
        List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
        Assert.assertNotNull(auditEntries);
        Assert.assertEquals(1, auditEntries.size());

        // Poke at the first audit log entry
        JsonNode entry = auditEntries.get(0);
        Assert.assertEquals("stuart", entry.get("eventUser").asText());
        Assert.assertEquals("procedureDate", entry.get("attribute").asText());
        Assert.assertTrue(entry.get("eventArgs").get("old").isNull());
        Assert.assertEquals("2014-02-03", entry.get("eventArgs").get("new").asText());

        // And now, we ought to be able to see the new audit entry in the database, and
        // the value should be correct too.
        ObjectNode data = getCaseAttributeValue(study, view, 6);
        Assert.assertTrue(data.has("procedureDate"));
        Assert.assertEquals("2014-02-03", data.get("procedureDate").asText());
    }

    /**
     * Overwriting a string value persists it and records an audit entry with
     * both the old and the new value.
     */
    @Test
    @Transactional
    @Rollback(true)
    public void testSingleCaseAttributeWriteValueString() {
        Study study = studyRepository.getStudy("DEMO");
        View view = studyRepository.getStudyView(study, "track");

        try {
            StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
            query = studyRepository.addViewCaseMatcher(query, view);
            query = studyRepository.addStudyCaseSelector(query, 1);

            ObjectNode values = jsonNodeFactory.objectNode();
            values.replace("patientId", jsonNodeFactory.textNode("DEMO-XX"));

            studyRepository.setQueryAttributes(query, "stuart", values);
        } catch (RepositoryException e) {
            Assert.fail();
        }

        // Check we now have an audit log entry
        CasePager query = new CasePager();
        query.setOffset(0);
        query.setLimit(5);
        List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
        Assert.assertNotNull(auditEntries);
        Assert.assertEquals(1, auditEntries.size());

        // Poke at the first audit log entry
        JsonNode entry = auditEntries.get(0);
        Assert.assertEquals("stuart", entry.get("eventUser").asText());
        Assert.assertEquals("patientId", entry.get("attribute").asText());
        Assert.assertEquals("DEMO-01", entry.get("eventArgs").get("old").asText());
        Assert.assertEquals("DEMO-XX", entry.get("eventArgs").get("new").asText());

        // And now, we ought to be able to see the new audit entry in the database, and
        // the value should be correct too. Note that as we have set null, we get back a
        // JSON null, not a Java one.
        JsonNode data = getCaseAttributeValue(study, view, 1);
        Assert.assertNotNull(data);
        Assert.assertTrue(data.has("patientId"));
        Assert.assertEquals("DEMO-XX", data.get("patientId").asText());
    }
/**
 * Inserts a string attribute for a case that previously had no value, and
 * checks the audit log shows a null old value and the stored data shows
 * the new string.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueStringInsert() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 10);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenNo", jsonNodeFactory.textNode("SMP-XX"));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry: no previous value, new one set
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenNo", entry.get("attribute").asText());
    Assert.assertTrue(entry.get("eventArgs").get("old").isNull());
    Assert.assertEquals("SMP-XX", entry.get("eventArgs").get("new").asText());
    // And now, we ought to be able to see the new value in the database
    JsonNode data = getCaseAttributeValue(study, view, 10);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenNo"));
    Assert.assertEquals("SMP-XX", data.get("specimenNo").asText());
}
/**
 * Sets a string attribute to null and checks the audit log records the old
 * value and a null new value, and the stored data holds a JSON null.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueStringNull() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("patientId", jsonNodeFactory.nullNode());
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("patientId", entry.get("attribute").asText());
    Assert.assertEquals("DEMO-01", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isNull());
    // And now, we ought to be able to see the new value in the database.
    // Note that as we have set null, we get back a JSON null, not a Java one.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("patientId"));
    Assert.assertTrue(data.get("patientId").isNull());
}
/**
 * Overwrites an option attribute with a different option value and checks
 * the audit log and the stored case data both reflect the change.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueOption() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.put("sampleAvailable", "St. Michaels");
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("sampleAvailable", entry.get("attribute").asText());
    Assert.assertEquals("LMP", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("St. Michaels", entry.get("eventArgs").get("new").asText());
    // And now, we ought to be able to see the new value in the database
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("sampleAvailable"));
    Assert.assertEquals("St. Michaels", data.get("sampleAvailable").asText());
}
/**
 * Overwrites a boolean attribute and checks the audit log records the
 * transition, and that the stored value is a real JSON boolean.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBoolean() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.put("specimenAvailable", false);
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("false", entry.get("eventArgs").get("new").asText());
    // And now, we ought to be able to see the new value in the database,
    // stored as a JSON boolean rather than a string
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isBoolean());
    Assert.assertEquals("false", data.get("specimenAvailable").asText());
}
/**
 * Writing a non-boolean JSON value into a boolean attribute must be
 * rejected with an InvalidValueException.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);
    ObjectNode attributes = jsonNodeFactory.objectNode();
    attributes.replace("specimenAvailable", jsonNodeFactory.textNode("BAD"));
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid boolean"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", attributes);
}
/**
 * Writing a non-string JSON value into a string attribute must be
 * rejected with an InvalidValueException.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueStringValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);
    ObjectNode attributes = jsonNodeFactory.objectNode();
    attributes.replace("patientId", jsonNodeFactory.booleanNode(false));
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid string"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", attributes);
}
/**
 * Writing a non-date JSON value into a date attribute must be rejected
 * with an InvalidValueException.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueDateValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);
    ObjectNode attributes = jsonNodeFactory.objectNode();
    attributes.replace("dateEntered", jsonNodeFactory.booleanNode(false));
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid date"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", attributes);
}
/**
 * Writing a string that is not a well-formed date into a date attribute
 * must be rejected with an InvalidValueException.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueDateValueFormatError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);
    ObjectNode attributes = jsonNodeFactory.objectNode();
    attributes.replace("dateEntered", jsonNodeFactory.textNode("2015-02-XX"));
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid date"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", attributes);
}
/**
 * Writing a non-string JSON value into an option attribute must be
 * rejected with an InvalidValueException.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueOptionValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);
    ObjectNode attributes = jsonNodeFactory.objectNode();
    attributes.replace("sampleAvailable", jsonNodeFactory.booleanNode(false));
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid string"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", attributes);
}
/**
 * Writing a string that is not one of the permitted options into an option
 * attribute must be rejected with an InvalidValueException.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueOptionUnexpectedValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);
    ObjectNode attributes = jsonNodeFactory.objectNode();
    attributes.replace("sampleAvailable", jsonNodeFactory.textNode("BAD"));
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid string"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", attributes);
}
// Regression test for #6 -- check that multiple writes are handled correctly.
/**
 * Writes two different boolean values in sequence; both writes should be
 * audited, and the stored value should be the one written last.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanTwice() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(false));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(true));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have two audit log entries, one per write
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(2, auditEntries.size());
    // The first returned entry corresponds to the second write (false -> true)
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("false", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("new").asText());
    // And the stored value should be the last one written
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertEquals("true", data.get("specimenAvailable").asText());
}
// Regression test for #6 -- check that multiple writes are handled correctly.
/**
 * Writes the same boolean value twice; only the first write changes
 * anything, so only one audit entry should be recorded.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteSameValueBooleanTwice() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(false));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(false));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Only the first write changed the value, so only one audit entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("false", entry.get("eventArgs").get("new").asText());
    // And the stored value should be the written value
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertEquals("false", data.get("specimenAvailable").asText());
}
// Regression test for #7 -- check that N/A writes are handled correctly.
/**
 * Writes the special {"$notAvailable": true} marker object into a boolean
 * attribute and checks it is audited and stored as an object, not coerced.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanNotAvailable() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        ObjectNode notAvailable = objectMapper.createObjectNode();
        notAvailable.put("$notAvailable", true);
        values.replace("specimenAvailable", notAvailable);
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry; the new value is the N/A object
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isObject());
    Assert.assertEquals(true, entry.get("eventArgs").get("new").get("$notAvailable").asBoolean());
    // And the stored value should be the N/A marker object
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isObject());
    Assert.assertEquals(true, data.get("specimenAvailable").get("$notAvailable").asBoolean());
}
/**
 * Sets a boolean attribute to null and checks the audit log records the
 * old value and a null new value, and the stored data holds a JSON null.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanNull() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.nullNode());
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isNull());
    // And now, we ought to be able to see the new value in the database.
    // Note that as we have set null, we get back a JSON null, not a Java one.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isNull());
}
/**
 * Writes the N/A marker object into an attribute of a case (id 15) that
 * had no previous value, and checks the audit entry and stored data.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteNonExistentValue() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 15);
        ObjectNode values = jsonNodeFactory.objectNode();
        ObjectNode notAvailable = objectMapper.createObjectNode();
        notAvailable.put("$notAvailable", true);
        values.replace("specimenAvailable", notAvailable);
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the exception message so a failure is diagnosable
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry; the old value was absent
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("null", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isObject());
    Assert.assertEquals(true, entry.get("eventArgs").get("new").get("$notAvailable").asBoolean());
    // And the stored value should be the N/A marker object
    JsonNode data = getCaseAttributeValue(study, view, 15);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isObject());
    Assert.assertEquals(true, data.get("specimenAvailable").get("$notAvailable").asBoolean());
}
/**
 * Looking up a study attribute name that does not exist should return
 * null rather than raising an error.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteMissingAttribute() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
Attributes attribute = studyRepository.getStudyAttribute(study, "dateEnteredX");
Assert.assertNull(attribute);
}
/**
 * Writing the unchanged attribute list back into the study should be
 * idempotent: a subsequent read returns field-for-field equal attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<Attributes> original = studyRepository.getStudyAttributes(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(27, original.size());
    studyRepository.setStudyAttributes(study, original);
    List<Attributes> reloaded = studyRepository.getStudyAttributes(study);
    Assert.assertEquals(reloaded.size(), original.size());
    // Every attribute should round-trip unchanged, field by field
    for (int i = 0; i < original.size(); i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
}
/**
 * Writing back only a prefix of the attribute list should delete the rest:
 * a subsequent read returns exactly the retained attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteStudyAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<Attributes> original = studyRepository.getStudyAttributes(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(27, original.size());
    // Keep only the first ten attributes
    studyRepository.setStudyAttributes(study, original.subList(0, 10));
    List<Attributes> reloaded = studyRepository.getStudyAttributes(study);
    Assert.assertEquals(10, reloaded.size());
    for (int i = 0; i < 10; i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
}
/**
 * Writes back a truncated attribute list plus one brand-new attribute, and
 * checks that both the retained and the added attributes round-trip.
 */
@Test
@Transactional
@Rollback(true)
public void testAddStudyAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<Attributes> original = studyRepository.getStudyAttributes(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(27, original.size());
    Attributes added = new Attributes();
    added.setName("test");
    added.setType("string");
    added.setLabel("Test");
    added.setDescription("First test attribute");
    List<Attributes> modified = original.subList(0, 10);
    modified.add(added);
    studyRepository.setStudyAttributes(study, modified);
    List<Attributes> reloaded = studyRepository.getStudyAttributes(study);
    Assert.assertEquals(11, reloaded.size());
    for (int i = 0; i < 10; i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
    Attributes loaded = reloaded.get(10);
    // Cheatily clear the id, so we can compare all other fields
    loaded.setId(null);
    Assert.assertTrue(EqualsBuilder.reflectionEquals(added, loaded));
}
/**
 * Writing the unchanged view list back into the study should be
 * idempotent: a subsequent read returns field-for-field equal views.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyViews() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> original = studyRepository.getStudyViews(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(3, original.size());
    studyRepository.setStudyViews(study, original);
    List<View> reloaded = studyRepository.getStudyViews(study);
    Assert.assertEquals(reloaded.size(), original.size());
    // Every view should round-trip unchanged, field by field
    for (int i = 0; i < original.size(); i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
}
/**
 * Replaces one view with a renamed copy carrying the same id, then checks
 * that writing the list back updates the existing record in place and the
 * whole list round-trips.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyViewsUpdateKey() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> list = studyRepository.getStudyViews(study);
    Assert.assertNotNull(list);
    Assert.assertEquals(3, list.size());
    // Build a replacement view that keeps the old identity but a new name
    View oldView = list.remove(2);
    View newView = new View();
    newView.setId(oldView.getId());
    newView.setStudyId(oldView.getStudyId());
    newView.setOptions(oldView.getOptions());
    newView.setName("testView");
    newView.setDescription("Test View");
    list.add(newView);
    Assert.assertEquals(3, list.size());
    studyRepository.setStudyViews(study, list);
    List<View> listAgain = studyRepository.getStudyViews(study);
    Assert.assertEquals(listAgain.size(), list.size());
    int size = list.size();
    for (int i = 0; i < size; i++) {
        View oldViewRead = list.get(i);
        View newViewRead = listAgain.get(i);
        Assert.assertTrue(EqualsBuilder.reflectionEquals(oldViewRead, newViewRead));
    }
}
/**
 * Writing back only a prefix of the view list should delete the rest: a
 * subsequent read returns exactly the retained views.
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteStudyView() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> original = studyRepository.getStudyViews(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(3, original.size());
    // Keep only the first two views
    studyRepository.setStudyViews(study, original.subList(0, 2));
    List<View> reloaded = studyRepository.getStudyViews(study);
    Assert.assertEquals(2, reloaded.size());
    for (int i = 0; i < 2; i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
}
/**
 * Writes back a truncated view list plus one brand-new view, and checks
 * that both the retained and the added views round-trip correctly.
 */
@Test
@Transactional
@Rollback(true)
public void testAddStudyViews() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> list = studyRepository.getStudyViews(study);
    Assert.assertNotNull(list);
    Assert.assertEquals(3, list.size());
    View v1 = new View();
    v1.setName("test");
    v1.setDescription("First test attribute");
    List<View> modified = list.subList(0, 2);
    modified.add(v1);
    studyRepository.setStudyViews(study, modified);
    List<View> listAgain = studyRepository.getStudyViews(study);
    Assert.assertEquals(3, listAgain.size());
    for (int i = 0; i < 2; i++) {
        View oldView = list.get(i);
        View newView = listAgain.get(i);
        Assert.assertTrue(EqualsBuilder.reflectionEquals(oldView, newView));
    }
    View loadedV1 = listAgain.get(2);
    // Cheatily clear the id, so we can compare all other fields
    loadedV1.setId(null);
    Assert.assertTrue(EqualsBuilder.reflectionEquals(v1, loadedV1));
}
/**
 * Writing the unchanged view-attribute list back should be idempotent: a
 * subsequent read returns field-for-field equal attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testSetViewAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> original = studyRepository.getViewAttributes(study, view);
    Assert.assertNotNull(original);
    Assert.assertEquals(15, original.size());
    studyRepository.setViewAttributes(study, view, original);
    List<ViewAttributes> reloaded = studyRepository.getViewAttributes(study, view);
    Assert.assertEquals(reloaded.size(), original.size());
    // Every view attribute should round-trip unchanged, field by field
    for (int i = 0; i < original.size(); i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
}
/**
 * Writing back only a prefix of the view-attribute list should delete the
 * rest: a subsequent read returns exactly the retained attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteViewAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> original = studyRepository.getViewAttributes(study, view);
    Assert.assertNotNull(original);
    Assert.assertEquals(15, original.size());
    // Keep only the first ten view attributes
    studyRepository.setViewAttributes(study, view, original.subList(0, 10));
    List<ViewAttributes> reloaded = studyRepository.getViewAttributes(study, view);
    Assert.assertEquals(10, reloaded.size());
    for (int i = 0; i < 10; i++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(i), reloaded.get(i)));
    }
}
/**
 * Writes back a truncated view-attribute list plus one attribute that
 * refers to an existing study attribute (id 8), and checks that both the
 * retained and the added attributes are returned on re-read.
 */
@Test
@Transactional
@Rollback(true)
public void testAddViewAttributes() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
Assert.assertNotNull(list);
Assert.assertEquals(15, list.size());
// Reference an existing study attribute by id
ViewAttributes att1 = new ViewAttributes();
att1.setId(8);
att1.setName("specimenNo");
att1.setType("string");
att1.setLabel("Specimen #");
att1.setStudyId(study.getId());
List<ViewAttributes> modified = list.subList(0, 10);
modified.add(att1);
studyRepository.setViewAttributes(study, view, modified);
List<ViewAttributes> listAgain = studyRepository.getViewAttributes(study, view);
Assert.assertEquals(11, listAgain.size());
for(int i = 0; i < 10; i++) {
Attributes oldAttribute = list.get(i);
Attributes newAttribute = listAgain.get(i);
Assert.assertTrue(EqualsBuilder.reflectionEquals(oldAttribute, newAttribute));
}
Attributes loadedAtt1 = listAgain.get(10);
// Copy the id and rank from what we wrote so reflectionEquals can
// compare all the remaining fields
loadedAtt1.setId(att1.getId());
loadedAtt1.setRank(att1.getRank());
Assert.assertTrue(EqualsBuilder.reflectionEquals(att1, loadedAtt1));
}
/**
 * Adding a view attribute whose id (600) does not correspond to any study
 * attribute must fail with a NotFoundException mentioning the missing
 * attribute.
 */
@Test
@Transactional
@Rollback(true)
public void testAddMissingViewAttributes() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
Assert.assertNotNull(list);
Assert.assertEquals(15, list.size());
// Id 600 does not exist as a study attribute
ViewAttributes att1 = new ViewAttributes();
att1.setId(600);
att1.setName("unknown");
att1.setType("string");
att1.setLabel("Specimen #");
att1.setStudyId(study.getId());
List<ViewAttributes> modified = list.subList(0, 10);
modified.add(att1);
thrown.expect(NotFoundException.class);
thrown.expectMessage(containsString("Missing attribute"));
studyRepository.setViewAttributes(study, view, modified);
}
/**
 * Creates a new case in a study and checks that it is assigned an id and a
 * study id, and that the same case can then be retrieved by that id.
 */
@Test
@Transactional
@Rollback(true)
public void testNewCase() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
Cases newCase = studyRepository.newStudyCase(study, "test");
Assert.assertNotNull(newCase);
Assert.assertNotNull(newCase.getId());
Assert.assertNotNull(newCase.getStudyId());
// And now let's dig out the new case -- mainly to check that we can actually
// follow this identifier.
Cases caseValue = studyRepository.getStudyCase(study, newCase.getId());
Assert.assertNotNull(caseValue);
Assert.assertEquals(newCase.getId(), caseValue.getId());
}
/**
 * Creates a new case positioned before an existing case, and checks that
 * the new case takes over the existing case's order value while the
 * existing case is bumped to a later order.
 */
@Test
@Transactional
@Rollback(true)
public void testNewCaseOrdering() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
Cases foundCase = studyRepository.getStudyCase(study, 10);
Integer foundCaseOrder = foundCase.getOrder();
Cases newCase = studyRepository.newStudyCase(study, "test", foundCase);
Assert.assertNotNull(newCase);
Assert.assertNotNull(newCase.getId());
Assert.assertNotNull(newCase.getStudyId());
// And now let's dig out the new case -- mainly to check that we can actually
// follow this identifier.
Cases caseValue = studyRepository.getStudyCase(study, newCase.getId());
Assert.assertNotNull(caseValue);
Assert.assertEquals(newCase.getId(), caseValue.getId());
// The new case inherits the order the existing case used to have.
Assert.assertEquals(foundCaseOrder, caseValue.getOrder());
// And check we've bumped the order
Cases refoundCase = studyRepository.getStudyCase(study, foundCase.getId());
Assert.assertThat(caseValue.getOrder(), Matchers.lessThan(refoundCase.getOrder()));
Assert.assertThat(foundCase.getOrder(), Matchers.not(refoundCase.getOrder()));
}
/**
 * Checks that when the underlying insert fails to produce a generated key,
 * newStudyCase reports the failure with an InvalidValueException rather
 * than returning a broken case.
 */
@Test
@Transactional
@Rollback(true)
public void testFailingNewCase() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
// Mock a template whose insertWithKey yields no generated key.
QueryDslJdbcTemplate mockTemplate = createMock(QueryDslJdbcTemplate.class);
expect(mockTemplate.newSqlQuery()).andStubReturn(studyRepository.getTemplate().newSqlQuery());
expect(mockTemplate.queryForObject(anyObject(SQLQuery.class), (Expression<?>) anyObject(Expression.class))).andStubReturn(null);
expect(mockTemplate.update(eq(QCases.cases), anyObject(SqlUpdateCallback.class))).andStubReturn(Long.valueOf(1L));
expect(mockTemplate.insertWithKey((RelationalPath<?>) anyObject(RelationalPath.class), (SqlInsertWithKeyCallback<?>) anyObject(SqlInsertWithKeyCallback.class))).andStubReturn(null);
replay(mockTemplate);
thrown.expect(InvalidValueException.class);
thrown.expectMessage(containsString("Can't create new case"));
// Swap in the mock template, always restoring the original afterwards so
// other tests are unaffected.
QueryDslJdbcTemplate originalTemplate = studyRepository.getTemplate();
studyRepository.setTemplate(mockTemplate);
try {
studyRepository.newStudyCase(study, "test");
} finally {
studyRepository.setTemplate(originalTemplate);
}
}
/**
 * Looks up a single study attribute by name and checks its name and label.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyAttribute() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
Attributes attributes = studyRepository.getStudyAttribute(study, "patientId");
Assert.assertEquals("patientId", attributes.getName());
Assert.assertEquals("Patient ID", attributes.getLabel());
}
/**
 * Sets a case state and checks that the change is written to the audit log
 * with the acting user, a null old state, and the new state, and that a
 * re-read of the case reflects the new state.
 */
@Test
@Transactional
@Rollback(true)
public void testNewCaseWithoutManager() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
Cases caseValue = studyRepository.getStudyCase(study, 7);
studyRepository.setStudyCaseState(study, caseValue, "morag", "pending");
// Check we now have an audit log entry
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(5);
List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
Assert.assertNotNull(auditEntries);
Assert.assertEquals(1, auditEntries.size());
JsonNode entry = auditEntries.get(0);
Assert.assertEquals("morag", entry.get("eventUser").asText());
Assert.assertTrue(entry.get("eventArgs").get("old_state").isNull());
Assert.assertEquals("pending", entry.get("eventArgs").get("state").asText());
// Check a re-read gets the new state
// NOTE(review): the state was set on case 7 but the re-read fetches case
// 15 -- this looks like a copy/paste slip; confirm which id is intended.
Cases foundValue = studyRepository.getStudyCase(study, 15);
Assert.assertEquals("pending", foundValue.getState());
}
/**
 * A single exact-match string filter on patientId should return exactly the
 * one matching case.
 * @throws RepositoryException
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFiltering() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
// Build a filter matching exactly one patient identifier.
ObjectNode criteria = jsonNodeFactory.objectNode();
criteria.replace("patientId", jsonNodeFactory.textNode("DEMO-02"));
StudyCaseQuery baseQuery = studyRepository.newStudyCaseQuery(study);
StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(baseQuery, criteria);
List<ObjectNode> results = studyRepository.getCaseData(filtered, view);
Assert.assertNotNull(results);
Assert.assertEquals(1, results.size());
JsonNode match = results.get(0);
Assert.assertNotNull(match);
Assert.assertTrue(match.has("patientId"));
Assert.assertEquals("DEMO-02", match.get("patientId").asText());
}
/**
 * Checks that exact-match filtering still works when the filter value
 * contains embedded spaces ("Dr. Z").
 * @throws RepositoryException
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringWithSpaces() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("physician", jsonNodeFactory.textNode("Dr. Z"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(2, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-03", data.get("patientId").asText());
}
/**
 * An exact-match string filter that matches nothing should produce an
 * empty result list rather than an error.
 * @throws RepositoryException
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringMiss() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
ObjectNode criteria = jsonNodeFactory.objectNode();
criteria.replace("patientId", jsonNodeFactory.textNode("MISSING"));
StudyCaseQuery baseQuery = studyRepository.newStudyCaseQuery(study);
StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(baseQuery, criteria);
List<ObjectNode> results = studyRepository.getCaseData(filtered, view);
Assert.assertNotNull(results);
Assert.assertEquals(0, results.size());
}
/**
 * Blanks are a special case. They might be NULL or they might be an
 * empty string, so we need to check for both in the underlying
 * query that we generate. The quoted empty string ("") is the filter
 * syntax for an explicitly blank value.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringBlank() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("mrn", jsonNodeFactory.textNode("\"\""));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(15, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-05", data.get("patientId").asText());
}
/**
 * N/A is a special case: the generated filter must check the missing
 * value field rather than the usual value field.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringNA() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
// Filter for cases where the tissue site is explicitly not applicable.
ObjectNode criteria = jsonNodeFactory.objectNode();
criteria.replace("tissueSite", jsonNodeFactory.textNode("N/A"));
StudyCaseQuery baseQuery = studyRepository.newStudyCaseQuery(study);
StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(baseQuery, criteria);
List<ObjectNode> results = studyRepository.getCaseData(filtered, view);
Assert.assertNotNull(results);
Assert.assertEquals(1, results.size());
JsonNode match = results.get(0);
Assert.assertNotNull(match);
Assert.assertTrue(match.has("patientId"));
Assert.assertEquals("DEMO-06", match.get("patientId").asText());
}
/**
 * A leading wildcard ("*-05") should match values by suffix and return the
 * single matching case.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringWildcardPrefix() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
ObjectNode criteria = jsonNodeFactory.objectNode();
criteria.replace("patientId", jsonNodeFactory.textNode("*-05"));
StudyCaseQuery baseQuery = studyRepository.newStudyCaseQuery(study);
StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(baseQuery, criteria);
List<ObjectNode> results = studyRepository.getCaseData(filtered, view);
Assert.assertNotNull(results);
Assert.assertEquals(1, results.size());
JsonNode match = results.get(0);
Assert.assertNotNull(match);
Assert.assertTrue(match.has("patientId"));
Assert.assertEquals("DEMO-05", match.get("patientId").asText());
}
/**
 * A trailing wildcard ("DEMO-0*") should match values by prefix, returning
 * every case whose patientId starts with that prefix.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringWildcardSuffix() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("patientId", jsonNodeFactory.textNode("DEMO-0*"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(10, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-01", data.get("patientId").asText());
}
/**
 * Filter values can be expressions: "N/A OR *lung*" should combine the
 * N/A match with a wildcard match, returning the union of the two.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringExpression() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("tissueSite", jsonNodeFactory.textNode("N/A OR *lung*"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(2, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-03", data.get("patientId").asText());
data = dataList.get(1);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-06", data.get("patientId").asText());
}
/**
 * Boolean attributes can be filtered with "Yes"/"No" style values; here
 * "No" on specimenAvailable should select the three matching cases.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicBooleanFilteringExpression() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("specimenAvailable", jsonNodeFactory.textNode("No"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(3, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-02", data.get("patientId").asText());
}
/**
 * Date attributes can be filtered with an ISO-formatted date value; here
 * an exact consentDate match should select a single case.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicDateFilteringExpression() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("consentDate", jsonNodeFactory.textNode("2014-08-18"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(1, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("patientId"));
Assert.assertEquals("DEMO-02", data.get("patientId").asText());
}
/**
 * Several filter fields at once should be combined (conjunctively) so that
 * only cases matching every criterion are returned.
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFiltering1() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "complete");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("patientId", jsonNodeFactory.textNode("DEMO-03"));
filter.replace("sampleAvailable", jsonNodeFactory.textNode("LMP"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(1, dataList.size());
JsonNode data = dataList.get(0);
Assert.assertNotNull(data);
Assert.assertTrue(data.has("specimenNo"));
Assert.assertEquals("S12-3000", data.get("specimenNo").asText());
}
/**
 * Regression check: combining an (unquoted) empty filter value with a
 * wildcard filter should not error, and the empty value should not
 * restrict the result set.
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFilteringBlankRegressionDate() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "complete");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("physician", jsonNodeFactory.textNode(""));
filter.replace("patientId", jsonNodeFactory.textNode("DEMO-0*"));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(10, dataList.size());
}
/**
 * Multiple filters with a blank value seem to be an issue, so let's test
 * that case too: an empty string plus an explicitly quoted blank date.
 * <p>
 * Regression for #101 - error filtering by blank string for dates and booleans
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFilteringBlankRegressionBoolean() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "complete");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("sampleAvailable", jsonNodeFactory.textNode(""));
filter.replace("consentDate", jsonNodeFactory.textNode("\"\""));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(15, dataList.size());
}
/**
 * Multiple filters with a blank value seem to be an issue, so let's test
 * that case too: an empty string plus an explicitly quoted blank boolean.
 * <p>
 * Regression for #101 - error filtering by blank string for dates and booleans
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFiltering3() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "complete");
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
ObjectNode filter = jsonNodeFactory.objectNode();
filter.replace("sampleAvailable", jsonNodeFactory.textNode(""));
filter.replace("specimenAvailable", jsonNodeFactory.textNode("\"\""));
StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
Assert.assertNotNull(dataList);
Assert.assertEquals(16, dataList.size());
}
/**
 * Tests that cases can be deleted using the studyRepository: the deleted
 * case becomes invisible to queries, and a "delete" audit log entry is
 * written containing the removed case's data.
 * @throws RepositoryException
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteCase() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
// First check the data exists
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.addStudyCaseSelector(query, 1);
List<ObjectNode> data = studyRepository.getCaseData(query, view);
Assert.assertNotNull(data);
Assert.assertEquals(1, data.size());
query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.addStudyCaseSelector(query, 1);
// Try the deletion
studyRepository.deleteCases(query, "morag");
// Now generate the query again, and confirm we can't find it
query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.addStudyCaseSelector(query, 1);
data = studyRepository.getCaseData(query, view);
Assert.assertNotNull(data);
Assert.assertEquals(0, data.size());
// The deletion should have been audited with the deleted case's data.
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(5);
List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
Assert.assertEquals(1, auditEntries.size());
JsonNode entry = auditEntries.get(0);
Assert.assertEquals("delete", entry.get("eventType").asText());
JsonNode entryData = entry.get("eventArgs").get("data");
Assert.assertEquals("DEMO-01", entryData.get("patientId").asText());
}
}
| src/test/java/ca/uhnresearch/pughlab/tracker/dao/impl/StudyRepositoryImplTest.java | package ca.uhnresearch.pughlab.tracker.dao.impl;
import static org.hamcrest.Matchers.containsString;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import static org.easymock.EasyMock.*;
import org.hamcrest.Matchers;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.Assert;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jdbc.query.QueryDslJdbcTemplate;
import org.springframework.data.jdbc.query.SqlInsertWithKeyCallback;
import org.springframework.data.jdbc.query.SqlUpdateCallback;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mysema.query.sql.RelationalPath;
import com.mysema.query.sql.SQLQuery;
import com.mysema.query.types.Expression;
import ca.uhnresearch.pughlab.tracker.dao.CasePager;
import ca.uhnresearch.pughlab.tracker.dao.InvalidValueException;
import ca.uhnresearch.pughlab.tracker.dao.NotFoundException;
import ca.uhnresearch.pughlab.tracker.dao.RepositoryException;
import ca.uhnresearch.pughlab.tracker.dao.StudyCaseQuery;
import ca.uhnresearch.pughlab.tracker.domain.QAuditLog;
import ca.uhnresearch.pughlab.tracker.domain.QCases;
import ca.uhnresearch.pughlab.tracker.dto.Attributes;
import ca.uhnresearch.pughlab.tracker.dto.AuditLogRecord;
import ca.uhnresearch.pughlab.tracker.dto.Cases;
import ca.uhnresearch.pughlab.tracker.dto.Study;
import ca.uhnresearch.pughlab.tracker.dto.View;
import ca.uhnresearch.pughlab.tracker.dto.ViewAttributes;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:**/testContextDatabase.xml" })
public class StudyRepositoryImplTest {
@Rule
public ExpectedException thrown = ExpectedException.none();
@SuppressWarnings("unused")
private final Logger logger = LoggerFactory.getLogger(getClass());
@Autowired
private StudyRepositoryImpl studyRepository;
@Autowired
private AuditLogRepositoryImpl auditLogRepository;
private JsonNodeFactory jsonNodeFactory = JsonNodeFactory.instance;
private static ObjectMapper objectMapper = new ObjectMapper();
/**
 * Sanity check that the Spring test context autowired the repository.
 */
@Test
public void testWiring() {
Assert.assertNotNull(studyRepository);
}
/**
 * Fetching the DEMO study by name returns a study with that name.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudy() {
Study demoStudy = studyRepository.getStudy("DEMO");
Assert.assertNotNull(demoStudy);
Assert.assertEquals("DEMO", demoStudy.getName());
}
/**
 * Checks that a study's options JSON is loaded, including the nested
 * stateLabels object and its "pending" label text.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyOptions() {
Study s = studyRepository.getStudy("DEMO");
Assert.assertNotNull(s);
Assert.assertNotNull(s.getOptions());
Assert.assertTrue(s.getOptions().has("stateLabels"));
Assert.assertTrue(s.getOptions().get("stateLabels").isObject());
Assert.assertTrue(s.getOptions().get("stateLabels").has("pending"));
Assert.assertTrue(s.getOptions().get("stateLabels").get("pending").isTextual());
Assert.assertEquals("label1", s.getOptions().get("stateLabels").get("pending").asText());
}
/**
 * Saving a brand-new study assigns it an id, and the study can then be
 * re-read by name with its description intact.
 */
@Test
@Transactional
@Rollback(true)
public void testSaveStudyNew() {
Study created = new Study();
created.setName("TEST");
created.setDescription("A test study");
studyRepository.saveStudy(created);
Assert.assertNotNull(created);
Assert.assertNotNull(created.getId());
// Re-read by name to verify persistence.
Study reloaded = studyRepository.getStudy("TEST");
Assert.assertNotNull(reloaded);
Assert.assertEquals("TEST", reloaded.getName());
Assert.assertEquals("A test study", reloaded.getDescription());
}
/**
 * Saving an existing study keeps its id and persists the updated
 * description, which is visible on a subsequent re-read.
 */
@Test
@Transactional
@Rollback(true)
public void testSaveStudyUpdate() {
Study s = studyRepository.getStudy("DEMO");
s.setDescription("Another test");
Study result = studyRepository.saveStudy(s);
Assert.assertNotNull(result);
Assert.assertNotNull(result.getId());
Assert.assertEquals(result.getId(), s.getId());
Study second = studyRepository.getStudy("DEMO");
Assert.assertNotNull(second);
Assert.assertEquals("Another test", second.getDescription());
}
/**
 * Looking up a study name that does not exist returns null.
 */
@Test
@Transactional
@Rollback(true)
public void testGetMissingStudy() {
Study missing = studyRepository.getStudy("DEMOX");
Assert.assertNull(missing);
}
/**
 * All three seeded studies should be returned by getAllStudies.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudies() {
List<Study> studies = studyRepository.getAllStudies();
Assert.assertNotNull(studies);
Assert.assertEquals(3, studies.size());
}
/**
 * The DEMO study should expose its three seeded views.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyViews() {
Study study = studyRepository.getStudy("DEMO");
List<View> list = studyRepository.getStudyViews(study);
Assert.assertNotNull(list);
Assert.assertEquals(3, list.size());
}
/**
 * A view can be looked up by name within a study.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyView() {
Study study = studyRepository.getStudy("DEMO");
View completeView = studyRepository.getStudyView(study, "complete");
Assert.assertNotNull(completeView);
Assert.assertEquals("complete", completeView.getName());
}
/**
 * Checks that a view's options JSON is loaded, including the "rows" array
 * and its first entry's "attribute" value.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyViewOptions() {
Study study = studyRepository.getStudy("DEMO");
View v = studyRepository.getStudyView(study, "secondary");
Assert.assertNotNull(v);
Assert.assertEquals("secondary", v.getName());
Assert.assertNotNull(v.getOptions());
Assert.assertNotNull(v.getOptions().get("rows"));
Assert.assertTrue(v.getOptions().get("rows").isArray());
Assert.assertEquals(1, v.getOptions().get("rows").size());
Assert.assertNotNull(v.getOptions().get("rows").get(0));
Assert.assertTrue(v.getOptions().get("rows").get(0).isObject());
Assert.assertEquals("study", v.getOptions().get("rows").get(0).get("attribute").asText());
}
/**
 * Replaces a view's options JSON and checks that a re-read of the view
 * returns the updated options. Declares the checked exception instead of
 * catching it and calling Assert.fail(), matching the style of the other
 * tests, so an unexpected failure surfaces with its full stack trace.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyViewOptions() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View v = studyRepository.getStudyView(study, "track");
Assert.assertNotNull(v);
Assert.assertEquals("track", v.getName());
// Build a replacement options document: {"rows":[{"attribute":...,"value":...}]}
ObjectNode viewOptions = objectMapper.createObjectNode();
ObjectNode viewOptionDescriptor = objectMapper.createObjectNode();
ArrayNode viewArray = objectMapper.createArrayNode();
viewOptionDescriptor.put("attribute", "dateEntered");
viewOptionDescriptor.put("value", "test");
viewArray.add(viewOptionDescriptor);
viewOptions.set("rows", viewArray);
v.setOptions(viewOptions);
studyRepository.setStudyView(study, v);
// Re-read the view and confirm the options round-tripped.
View modifiedView = studyRepository.getStudyView(study, "track");
Assert.assertNotNull(modifiedView.getOptions());
Assert.assertNotNull(modifiedView.getOptions().get("rows"));
Assert.assertTrue(modifiedView.getOptions().get("rows").isArray());
Assert.assertEquals(1, modifiedView.getOptions().get("rows").size());
Assert.assertNotNull(modifiedView.getOptions().get("rows").get(0));
Assert.assertTrue(modifiedView.getOptions().get("rows").get(0).isObject());
Assert.assertEquals("dateEntered", modifiedView.getOptions().get("rows").get(0).get("attribute").asText());
Assert.assertEquals("test", modifiedView.getOptions().get("rows").get(0).get("value").asText());
}
/**
 * Updating a view whose studyId points at a different study should be
 * rejected with a NotFoundException.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyViewOptionsInvalidView() throws RepositoryException {
Study study = studyRepository.getStudy("DEMO");
View v = studyRepository.getStudyView(study, "track");
Assert.assertNotNull(v);
Assert.assertEquals("track", v.getName());
ObjectNode viewOptions = objectMapper.createObjectNode();
ObjectNode viewOptionDescriptor = objectMapper.createObjectNode();
ArrayNode viewArray = objectMapper.createArrayNode();
viewOptionDescriptor.put("attribute", "dateEntered");
viewOptionDescriptor.put("value", "test");
viewArray.add(viewOptionDescriptor);
viewOptions.set("rows", viewArray);
v.setOptions(viewOptions);
// Point the view at a study id that does not match the study argument.
v.setStudyId(100);
thrown.expect(NotFoundException.class);
thrown.expectMessage(containsString("Can't update view for a different study"));
studyRepository.setStudyView(study, v);
}
/**
 * Looking up a view name that does not exist in the study returns null.
 */
@Test
@Transactional
@Rollback(true)
public void testGetMissingStudyView() {
Study study = studyRepository.getStudy("DEMO");
View missingView = studyRepository.getStudyView(study, "completed");
Assert.assertNull(missingView);
}
/**
 * The DEMO study should expose all 27 of its seeded attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyAttributes() {
Study study = studyRepository.getStudy("DEMO");
List<Attributes> list = studyRepository.getStudyAttributes(study);
Assert.assertNotNull(list);
Assert.assertEquals(27, list.size());
}
/**
 * The "complete" view should expose all 27 study attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testGetViewAttributes() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "complete");
List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
Assert.assertNotNull(list);
Assert.assertEquals(27, list.size());
}
/**
 * The "track" view exposes only a subset (15) of the study attributes.
 */
@Test
@Transactional
@Rollback(true)
public void testSmallerGetViewAttributes() {
Study study = studyRepository.getStudy("DEMO");
View trackView = studyRepository.getStudyView(study, "track");
List<ViewAttributes> attributes = studyRepository.getViewAttributes(study, trackView);
Assert.assertNotNull(attributes);
Assert.assertEquals(15, attributes.size());
}
/**
 * A paged query with offset 0 and limit 10 returns exactly 10 case rows.
 */
@Test
@Transactional
@Rollback(true)
public void testGetData() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(10);
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.applyPager(query, pager);
List<ObjectNode> list = studyRepository.getCaseData(query, view);
Assert.assertNotNull(list);
Assert.assertEquals(10, list.size());
}
/**
 * Regression test for #53 -- checks that only legitimate view attributes are
 * returned: the "mrn" attribute is not part of the "track" view and must
 * not leak into the case data.
 */
@Test
@Transactional
@Rollback(true)
public void testGetDataSecurity() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(10);
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.applyPager(query, pager);
List<ObjectNode> list = studyRepository.getCaseData(query, view);
Assert.assertNotNull(list);
Assert.assertEquals(10, list.size());
Assert.assertFalse(list.get(0).has("mrn"));
}
/**
 * A pager with a null limit returns every case (all 20 in the fixture).
 */
@Test
@Transactional
@Rollback(true)
public void testGetDataNoLimit() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(null);
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.applyPager(query, pager);
List<ObjectNode> list = studyRepository.getCaseData(query, view);
Assert.assertNotNull(list);
Assert.assertEquals(20, list.size());
}
/**
 * A pager with a null offset still applies the limit (5 rows returned).
 */
@Test
@Transactional
@Rollback(true)
public void testGetDataNoOffset() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
CasePager pager = new CasePager();
pager.setOffset(null);
pager.setLimit(5);
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.applyPager(query, pager);
List<ObjectNode> list = studyRepository.getCaseData(query, view);
Assert.assertNotNull(list);
Assert.assertEquals(5, list.size());
}
/**
 * A pager carrying an order field and direction is accepted and still
 * returns the paged rows.
 * NOTE(review): only the result size is asserted here; the actual sort
 * order of the rows is not verified.
 */
@Test
@Transactional
@Rollback(true)
public void testGetDataOrdered() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(5);
pager.setOrderField("consentDate");
pager.setOrderDirection(CasePager.OrderDirection.DESC);
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.applyPager(query, pager);
List<ObjectNode> list = studyRepository.getCaseData(query, view);
Assert.assertNotNull(list);
Assert.assertEquals(5, list.size());
}
/**
 * Checks that when an attribute filter is applied, only the specified attributes are returned:
 * querying with just the first three view attributes must omit the
 * remaining attributes (physician, tissueSite, specimenAvailable).
 */
@Test
@Transactional
@Rollback(true)
public void testGetDataFiltered() {
Study study = studyRepository.getStudy("DEMO");
View view = studyRepository.getStudyView(study, "track");
List<ViewAttributes> attributes = studyRepository.getViewAttributes(study, view);
List<ViewAttributes> filteredAttributes = attributes.subList(0, 3);
CasePager pager = new CasePager();
pager.setOffset(0);
pager.setLimit(null);
StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
query = studyRepository.applyPager(query, pager);
List<ObjectNode> list = studyRepository.getCaseData(query, filteredAttributes);
Assert.assertNotNull(list);
Assert.assertEquals(20, list.size());
// Spot-check the first five rows for excluded attributes.
for(int i = 0; i < 5; i++) {
Assert.assertFalse(list.get(i).has("physician"));
Assert.assertFalse(list.get(i).has("tissueSite"));
Assert.assertFalse(list.get(i).has("specimenAvailable"));
}
}
@Test
@Transactional
@Rollback(true)
public void testRecordCount() {
    // The DEMO study's track view contains exactly 20 records.
    Study demo = studyRepository.getStudy("DEMO");
    View trackView = studyRepository.getStudyView(demo, "track");
    Long total = studyRepository.getRecordCount(demo, trackView);
    Assert.assertEquals(20, total.intValue());
}
@Test
@Transactional
@Rollback(true)
public void testSingleCase() {
    // Case 1 belongs to DEMO and is retrievable by its identifier.
    Study demo = studyRepository.getStudy("DEMO");
    Cases found = studyRepository.getStudyCase(demo, 1);
    Assert.assertNotNull(found);
    Assert.assertEquals(1, found.getId().intValue());
}
@Test
@Transactional
@Rollback(true)
public void testSingleMissingCase() {
    // A case id that does not exist should yield null, not an error.
    Study demo = studyRepository.getStudy("DEMO");
    Cases missing = studyRepository.getStudyCase(demo, 100);
    Assert.assertNull(missing);
}
@Test
@Transactional
@Rollback(true)
public void testSingleFromDifferentStudy() {
    // Case 22 exists but is not part of DEMO, so the lookup must return null.
    Study demo = studyRepository.getStudy("DEMO");
    Cases foreignCase = studyRepository.getStudyCase(demo, 22);
    Assert.assertNull(foreignCase);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseValues() {
    Study demo = studyRepository.getStudy("DEMO");
    View trackView = studyRepository.getStudyView(demo, "track");

    StudyCaseQuery caseQuery = studyRepository.addStudyCaseSelector(studyRepository.newStudyCaseQuery(demo), 1);
    List<ObjectNode> rows = studyRepository.getCaseData(caseQuery, trackView);

    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());

    // dateEntered for case 1 is a fixed fixture value.
    String dateEntered = rows.get(0).get("dateEntered").asText();
    Assert.assertNotNull(dateEntered);
    Assert.assertEquals("2014-08-20", dateEntered);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseStateNull() {
    Study demo = studyRepository.getStudy("DEMO");
    View trackView = studyRepository.getStudyView(demo, "track");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(demo);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, trackView);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    List<ObjectNode> rows = studyRepository.getCaseData(caseQuery, trackView);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());

    // Case 1 carries no state: $state must be present but a JSON null.
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("$state"));
    Assert.assertTrue(row.get("$state").isNull());
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseStatePending() {
    Study second = studyRepository.getStudy("SECOND");
    View completeView = studyRepository.getStudyView(second, "complete");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(second);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, completeView);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 21);

    List<ObjectNode> rows = studyRepository.getCaseData(caseQuery, completeView);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());

    // Case 21 in the SECOND study is in the "pending" state.
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("$state"));
    Assert.assertTrue(row.get("$state").isTextual());
    Assert.assertEquals("pending", row.get("$state").asText());
}
/**
 * A numeric attribute should round-trip through the case data as a JSON number.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseNumberValues() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
    query = studyRepository.addStudyCaseSelector(query, 1);
    List<ObjectNode> data = studyRepository.getCaseData(query, view);
    Assert.assertNotNull(data);
    Assert.assertEquals(1, data.size());
    ObjectNode single = data.get(0);
    Assert.assertTrue(single.has("numberCores"));
    // Bug fix: the previous boxed Double could never be null after autoboxing,
    // so assertNotNull was a no-op. Use the primitive and a delta-based
    // equality assertion instead of a hand-rolled Math.abs comparison.
    double cores = single.get("numberCores").asDouble();
    Assert.assertEquals(2.0, cores, 0.00000001);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseValuesNotes() {
    Study demo = studyRepository.getStudy("DEMO");
    View trackView = studyRepository.getStudyView(demo, "track");

    StudyCaseQuery caseQuery = studyRepository.addStudyCaseSelector(studyRepository.newStudyCaseQuery(demo), 1);
    List<ObjectNode> rows = studyRepository.getCaseData(caseQuery, trackView);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());

    JsonNode notes = rows.get(0).get("$notes");
    Assert.assertNotNull(notes);

    // specimenAvailable carries no notes in the fixture.
    Assert.assertNull(notes.get("specimenAvailable"));

    // consentDate does carry notes, with a boolean "locked" flag set to true.
    JsonNode consentDateNotes = notes.get("consentDate");
    Assert.assertNotNull(consentDateNotes);
    JsonNode lockedFlag = consentDateNotes.get("locked");
    Assert.assertNotNull(lockedFlag);
    Assert.assertTrue(lockedFlag.isBoolean());
    Assert.assertTrue(lockedFlag.asBoolean());
}
/**
 * A single-case read with a filtered attribute list must include exactly the
 * surviving attributes and no others.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseValuesFiltered() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> attributes = studyRepository.getViewAttributes(study, view);
    List<ViewAttributes> filteredAttributes = attributes.subList(0, 3);
    StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
    query = studyRepository.addViewCaseMatcher(query, view);
    query = studyRepository.addStudyCaseSelector(query, 1);
    List<ObjectNode> dataList = studyRepository.getCaseData(query, filteredAttributes);
    Assert.assertEquals(1, dataList.size());
    ObjectNode data = dataList.get(0);
    Assert.assertNotNull(data);
    // An attribute should appear in the row exactly when it survived the filter.
    for(ViewAttributes va : attributes) {
        // Use the primitive: the boxed Boolean added only needless autoboxing.
        boolean filtered = filteredAttributes.contains(va);
        Assert.assertEquals("Failed to filter attribute: " + va.getName(), filtered, data.has(va.getName()));
    }
}
/**
 * Test helper: runs a single-case query for {@code caseId} against the given
 * view and returns the one matching data row, asserting exactly one exists.
 */
private ObjectNode getCaseAttributeValue(Study study, View view, Integer caseId) {
    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, caseId);
    List<ObjectNode> rows = studyRepository.getCaseData(caseQuery, view);
    Assert.assertEquals(1, rows.size());
    return rows.get(0);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeValues() {
    Study demo = studyRepository.getStudy("DEMO");
    View trackView = studyRepository.getStudyView(demo, "track");
    // Case 1's dateEntered is a known fixture value.
    ObjectNode row = getCaseAttributeValue(demo, trackView, 1);
    Assert.assertNotNull(row);
    Assert.assertEquals("2014-08-20", row.get("dateEntered").asText());
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeValuesNotAvailable() {
    Study demo = studyRepository.getStudy("DEMO");
    View completeView = studyRepository.getStudyView(demo, "complete");
    ObjectNode row = getCaseAttributeValue(demo, completeView, 2);
    // A not-available value is rendered as the object {"$notAvailable": true}.
    JsonNode trackerDate = row.get("trackerDate");
    Assert.assertTrue(trackerDate.isObject());
    Assert.assertTrue(trackerDate.has("$notAvailable"));
    Assert.assertEquals("true", trackerDate.get("$notAvailable").asText());
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeMissing() {
    Study demo = studyRepository.getStudy("DEMO");
    View trackView = studyRepository.getStudyView(demo, "track");
    ObjectNode row = getCaseAttributeValue(demo, trackView, 1);
    Assert.assertNotNull(row);
    // bloodCollDate has no value for case 1, so the key is absent entirely
    // (rather than present with a null value).
    Assert.assertFalse(row.has("bloodCollDate"));
}
@Test
@Transactional
@Rollback(true)
public void testAuditLog() {
    // No writes have happened in this transaction, so the audit trail is empty.
    Study demo = studyRepository.getStudy("DEMO");
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(demo, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(0, auditEntries.size());
}
@Test
@Transactional
@Rollback(true)
public void testAuditLogWithNoLimits() {
    // Null offset and limit mean "no paging" and must not break the query.
    Study demo = studyRepository.getStudy("DEMO");
    CasePager pager = new CasePager();
    pager.setOffset(null);
    pager.setLimit(null);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(demo, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(0, auditEntries.size());
}
// An audit record whose eventArgs is not valid JSON ("{") must not make
// getAuditData() blow up. The repository's QueryDSL template is swapped for
// an EasyMock stub that returns the malformed record, and restored in a
// finally block so later tests see the real template again.
@Test
@Transactional
@Rollback(true)
public void testAuditLogWithBadData() {
Study study = studyRepository.getStudy("DEMO");
CasePager query = new CasePager();
query.setOffset(null);
query.setLimit(null);
// Build one audit record with deliberately truncated (unparseable) JSON args.
List<AuditLogRecord> data = new ArrayList<AuditLogRecord>();
AuditLogRecord entry = new AuditLogRecord();
entry.setEventTime(Timestamp.from(Instant.now()));
entry.setEventArgs("{");
data.add(entry);
// Stub the template: newSqlQuery() delegates to the real template so query
// construction still works, but query(...) always returns our bad record.
QueryDslJdbcTemplate originalTemplate = studyRepository.getTemplate();
QueryDslJdbcTemplate mockTemplate = createMock(QueryDslJdbcTemplate.class);
expect(mockTemplate.newSqlQuery()).andStubReturn(originalTemplate.newSqlQuery());
expect(mockTemplate.query(anyObject(SQLQuery.class), anyObject(QAuditLog.class))).andStubReturn(data);
replay(mockTemplate);
studyRepository.setTemplate(mockTemplate);
List<JsonNode> auditEntries = null;
try {
auditEntries = auditLogRepository.getAuditData(study, query);
} finally {
// Always restore the real template, even if getAuditData() throws.
studyRepository.setTemplate(originalTemplate);
}
// The call must survive the malformed record and return a (non-null) list.
Assert.assertNotNull(auditEntries);
}
// Companion to testAuditLogWithBadData: with well-formed eventArgs JSON the
// same mocked-template path must also succeed and return a non-null list.
@Test
@Transactional
@Rollback(true)
public void testAuditLogWithGoodData() {
Study study = studyRepository.getStudy("DEMO");
CasePager query = new CasePager();
query.setOffset(null);
query.setLimit(null);
// Build one audit record whose eventArgs parses cleanly.
List<AuditLogRecord> data = new ArrayList<AuditLogRecord>();
AuditLogRecord entry = new AuditLogRecord();
entry.setEventTime(Timestamp.from(Instant.now()));
entry.setEventArgs("{\"old\":null,\"value\":100}");
data.add(entry);
// Stub the template so query(...) returns our canned record; newSqlQuery()
// still delegates to the real template.
QueryDslJdbcTemplate originalTemplate = studyRepository.getTemplate();
QueryDslJdbcTemplate mockTemplate = createMock(QueryDslJdbcTemplate.class);
expect(mockTemplate.newSqlQuery()).andStubReturn(originalTemplate.newSqlQuery());
expect(mockTemplate.query(anyObject(SQLQuery.class), anyObject(QAuditLog.class))).andStubReturn(data);
replay(mockTemplate);
studyRepository.setTemplate(mockTemplate);
List<JsonNode> auditEntries = null;
try {
auditEntries = auditLogRepository.getAuditData(study, query);
} finally {
// Always restore the real template for subsequent tests.
studyRepository.setTemplate(originalTemplate);
}
Assert.assertNotNull(auditEntries);
}
/**
 * Clearing a date attribute (writing JSON null) should update the stored
 * value, record exactly one audit entry with old/new values, and read back
 * as a JSON null (not a missing field).
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueDate() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("dateEntered", jsonNodeFactory.nullNode());
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("dateEntered", entry.get("attribute").asText());
    Assert.assertEquals("2014-08-20", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isNull());
    // And now, we ought to be able to see the new audit entry in the database, and
    // the value should be correct too. Note that as we have set null, we get back a
    // JSON null, not a Java one.
    ObjectNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertTrue(data.has("dateEntered"));
    Assert.assertTrue(data.get("dateEntered").isNull());
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueDateInsert() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");

    // Case 6 has no procedureDate yet, so this write is an insert.
    try {
        StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
        caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
        caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 6);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("procedureDate", jsonNodeFactory.textNode("2014-02-03"));
        studyRepository.setQueryAttributes(caseQuery, "stuart", values);
    } catch (RepositoryException e) {
        Assert.fail(e.getMessage());
    }

    // Exactly one audit entry should record the insert.
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());

    // The entry records the user, attribute, a null old value, and the new value.
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("procedureDate", entry.get("attribute").asText());
    Assert.assertTrue(entry.get("eventArgs").get("old").isNull());
    Assert.assertEquals("2014-02-03", entry.get("eventArgs").get("new").asText());

    // The stored value should now be readable from the repository.
    ObjectNode data = getCaseAttributeValue(study, view, 6);
    Assert.assertTrue(data.has("procedureDate"));
    Assert.assertEquals("2014-02-03", data.get("procedureDate").asText());
}
/**
 * Overwriting a string attribute should update the value and record one
 * audit entry carrying both the old and the new string.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueString() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("patientId", jsonNodeFactory.textNode("DEMO-XX"));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("patientId", entry.get("attribute").asText());
    Assert.assertEquals("DEMO-01", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("DEMO-XX", entry.get("eventArgs").get("new").asText());
    // The updated value should now be readable from the repository.
    // (Comment fixed: the old "we have set null" note was copy-pasted.)
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("patientId"));
    Assert.assertEquals("DEMO-XX", data.get("patientId").asText());
}
/**
 * Inserting a string attribute where none existed should record an audit
 * entry with a null old value and make the new value readable.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueStringInsert() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 10);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenNo", jsonNodeFactory.textNode("SMP-XX"));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenNo", entry.get("attribute").asText());
    Assert.assertTrue(entry.get("eventArgs").get("old").isNull());
    Assert.assertEquals("SMP-XX", entry.get("eventArgs").get("new").asText());
    // The inserted value should now be readable from the repository.
    // (Comment fixed: the old "we have set null" note was copy-pasted.)
    JsonNode data = getCaseAttributeValue(study, view, 10);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenNo"));
    Assert.assertEquals("SMP-XX", data.get("specimenNo").asText());
}
/**
 * Clearing a string attribute with JSON null should record the old value in
 * the audit log and read back as a present-but-null field.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueStringNull() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("patientId", jsonNodeFactory.nullNode());
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("patientId", entry.get("attribute").asText());
    Assert.assertEquals("DEMO-01", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isNull());
    // And now, we ought to be able to see the new audit entry in the database, and
    // the value should be correct too. Note that as we have set null, we get back a
    // JSON null, not a Java one.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("patientId"));
    Assert.assertTrue(data.get("patientId").isNull());
}
/**
 * Writing a new option (enumerated string) value should be audited with the
 * old and new option strings and read back correctly.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueOption() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.put("sampleAvailable", "St. Michaels");
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("sampleAvailable", entry.get("attribute").asText());
    Assert.assertEquals("LMP", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("St. Michaels", entry.get("eventArgs").get("new").asText());
    // The updated option should now be readable from the repository.
    // (Comment fixed: the old "we have set null" note was copy-pasted.)
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("sampleAvailable"));
    Assert.assertEquals("St. Michaels", data.get("sampleAvailable").asText());
}
/**
 * Flipping a boolean attribute should be audited with the old and new values
 * and read back as a JSON boolean.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBoolean() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.put("specimenAvailable", false);
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("false", entry.get("eventArgs").get("new").asText());
    // The updated boolean should now be readable from the repository.
    // (Comment fixed: the old "we have set null" note was copy-pasted.)
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isBoolean());
    Assert.assertEquals("false", data.get("specimenAvailable").asText());
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    // A text node is not a legal payload for a boolean attribute.
    ObjectNode values = jsonNodeFactory.objectNode();
    values.replace("specimenAvailable", jsonNodeFactory.textNode("BAD"));

    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid boolean"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", values);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueStringValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    // A boolean node is not a legal payload for a string attribute.
    ObjectNode values = jsonNodeFactory.objectNode();
    values.replace("patientId", jsonNodeFactory.booleanNode(false));

    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid string"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", values);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueDateValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    // A boolean node is not a legal payload for a date attribute.
    ObjectNode values = jsonNodeFactory.objectNode();
    values.replace("dateEntered", jsonNodeFactory.booleanNode(false));

    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid date"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", values);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueDateValueFormatError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    // Textual, but not a parseable date: must be rejected.
    ObjectNode values = jsonNodeFactory.objectNode();
    values.replace("dateEntered", jsonNodeFactory.textNode("2015-02-XX"));

    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid date"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", values);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueOptionValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    // A boolean node is not a legal payload for an option attribute.
    ObjectNode values = jsonNodeFactory.objectNode();
    values.replace("sampleAvailable", jsonNodeFactory.booleanNode(false));

    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid string"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", values);
}
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueOptionUnexpectedValueError() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");

    StudyCaseQuery caseQuery = studyRepository.newStudyCaseQuery(study);
    caseQuery = studyRepository.addViewCaseMatcher(caseQuery, view);
    caseQuery = studyRepository.addStudyCaseSelector(caseQuery, 1);

    // Textual, but not one of the attribute's permitted options.
    ObjectNode values = jsonNodeFactory.objectNode();
    values.replace("sampleAvailable", jsonNodeFactory.textNode("BAD"));

    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Invalid string"));
    studyRepository.setQueryAttributes(caseQuery, "stuart", values);
}
// Regression test for #6 -- check that multiple writes are handled correctly.
// Two successive writes (false, then true) must both be audited, and the
// final stored value must reflect the second write.
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanTwice() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    // First write: false.
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(false));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Second write: back to true.
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(true));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        Assert.fail(e.getMessage());
    }
    // Check we now have two audit log entries, one per write
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(2, auditEntries.size());
    // Poke at the first audit log entry (the second, false -> true, write)
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("false", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("new").asText());
    // The stored value must match the second write.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertEquals("true", data.get("specimenAvailable").asText());
}
// Regression test for #6 -- check that multiple writes are handled correctly.
// Writing the same value twice must produce only ONE audit entry: the second,
// no-op, write should not be audited.
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteSameValueBooleanTwice() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    // First write: false.
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(false));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Second write: false again (a no-op).
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.booleanNode(false));
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        Assert.fail(e.getMessage());
    }
    // Check we have exactly one audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertEquals("false", entry.get("eventArgs").get("new").asText());
    // The stored value must be the written value.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertEquals("false", data.get("specimenAvailable").asText());
}
// Regression test for #7 -- check that N/A writes are handled correctly.
// Writing {"$notAvailable": true} must be audited and read back as the same
// object marker, not as a plain value.
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanNotAvailable() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        ObjectNode notAvailable = objectMapper.createObjectNode();
        notAvailable.put("$notAvailable", true);
        values.replace("specimenAvailable", notAvailable);
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isObject());
    Assert.assertEquals(true, entry.get("eventArgs").get("new").get("$notAvailable").asBoolean());
    // The N/A marker object must round-trip through the repository.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isObject());
    Assert.assertEquals(true, data.get("specimenAvailable").get("$notAvailable").asBoolean());
}
/**
 * Clearing a boolean attribute with JSON null should be audited and read
 * back as a present-but-null field.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteValueBooleanNull() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 1);
        ObjectNode values = jsonNodeFactory.objectNode();
        values.replace("specimenAvailable", jsonNodeFactory.nullNode());
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        // Include the cause so a failure here is diagnosable (was a bare fail()).
        Assert.fail(e.getMessage());
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    Assert.assertEquals("true", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isNull());
    // And now, we ought to be able to see the new audit entry in the database, and
    // the value should be correct too. Note that as we have set null, we get back a
    // JSON null, not a Java one.
    JsonNode data = getCaseAttributeValue(study, view, 1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isNull());
}
/**
 * Writes a {$notAvailable: true} marker object into "specimenAvailable" for
 * case 15, which previously had no value for that attribute (the audit
 * entry's old value reads "null"). Verifies the audit entry and that a
 * re-read returns the not-available object, not a plain value.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteNonExistentValue() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    try {
        StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
        query = studyRepository.addViewCaseMatcher(query, view);
        query = studyRepository.addStudyCaseSelector(query, 15);
        ObjectNode values = jsonNodeFactory.objectNode();
        ObjectNode notAvailable = objectMapper.createObjectNode();
        notAvailable.put("$notAvailable", true);
        values.replace("specimenAvailable", notAvailable);
        studyRepository.setQueryAttributes(query, "stuart", values);
    } catch (RepositoryException e) {
        Assert.fail();
    }
    // Check we now have an audit log entry
    CasePager query = new CasePager();
    query.setOffset(0);
    query.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, query);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    // Poke at the first audit log entry
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("stuart", entry.get("eventUser").asText());
    Assert.assertEquals("specimenAvailable", entry.get("attribute").asText());
    // No previous value existed, so the recorded old value is the string "null".
    Assert.assertEquals("null", entry.get("eventArgs").get("old").asText());
    Assert.assertTrue(entry.get("eventArgs").get("new").isObject());
    Assert.assertEquals(true, entry.get("eventArgs").get("new").get("$notAvailable").asBoolean());
    // Re-read the stored value: it should be the {$notAvailable: true} object
    // we wrote, not a plain boolean or a null.
    JsonNode data = getCaseAttributeValue(study, view, 15);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("specimenAvailable"));
    Assert.assertTrue(data.get("specimenAvailable").isObject());
    Assert.assertEquals(true, data.get("specimenAvailable").get("$notAvailable").asBoolean());
}
/**
 * Looking up an attribute name that does not exist in the study should
 * return null rather than throwing.
 */
@Test
@Transactional
@Rollback(true)
public void testSingleCaseAttributeWriteMissingAttribute() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    Attributes missing = studyRepository.getStudyAttribute(study, "dateEnteredX");
    Assert.assertNull(missing);
}
/**
 * Round-trip test: writes the study's attribute list back unchanged and
 * verifies a second read returns data identical to the original,
 * attribute by attribute.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<Attributes> before = studyRepository.getStudyAttributes(study);
    Assert.assertNotNull(before);
    Assert.assertEquals(27, before.size());
    // Write the identical list back, then re-read it.
    studyRepository.setStudyAttributes(study, before);
    List<Attributes> after = studyRepository.getStudyAttributes(study);
    Assert.assertEquals(after.size(), before.size());
    for (int index = 0; index < before.size(); index++) {
        // Field-by-field comparison of each attribute pair.
        Assert.assertTrue(EqualsBuilder.reflectionEquals(before.get(index), after.get(index)));
    }
}
/**
 * Writes back only the first ten study attributes and verifies the rest
 * were deleted while the ten survivors are unchanged.
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteStudyAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<Attributes> before = studyRepository.getStudyAttributes(study);
    Assert.assertNotNull(before);
    Assert.assertEquals(27, before.size());
    // Truncate to the first ten attributes.
    studyRepository.setStudyAttributes(study, before.subList(0, 10));
    List<Attributes> after = studyRepository.getStudyAttributes(study);
    Assert.assertEquals(10, after.size());
    for (int index = 0; index < 10; index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(before.get(index), after.get(index)));
    }
}
/**
 * Keeps ten study attributes, appends one brand-new attribute, and
 * verifies the write: the survivors are unchanged and the new attribute
 * round-trips once its generated id is ignored.
 */
@Test
@Transactional
@Rollback(true)
public void testAddStudyAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<Attributes> original = studyRepository.getStudyAttributes(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(27, original.size());

    Attributes added = new Attributes();
    added.setName("test");
    added.setType("string");
    added.setLabel("Test");
    added.setDescription("First test attribute");

    List<Attributes> replacement = original.subList(0, 10);
    replacement.add(added);
    studyRepository.setStudyAttributes(study, replacement);

    List<Attributes> reread = studyRepository.getStudyAttributes(study);
    Assert.assertEquals(11, reread.size());
    for (int index = 0; index < 10; index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(index), reread.get(index)));
    }
    Attributes rereadAdded = reread.get(10);
    // The stored copy acquires a generated id; null it out so the remaining
    // fields can be compared reflectively.
    rereadAdded.setId(null);
    Assert.assertTrue(EqualsBuilder.reflectionEquals(added, rereadAdded));
}
/**
 * Round-trip test: writes the study's view list back unchanged and
 * verifies a second read returns identical data, view by view.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyViews() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> before = studyRepository.getStudyViews(study);
    Assert.assertNotNull(before);
    Assert.assertEquals(3, before.size());
    // Write the identical list back, then re-read it.
    studyRepository.setStudyViews(study, before);
    List<View> after = studyRepository.getStudyViews(study);
    Assert.assertEquals(after.size(), before.size());
    for (int index = 0; index < before.size(); index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(before.get(index), after.get(index)));
    }
}
/**
 * Replaces the third view with a renamed view that keeps the same id,
 * study and options, then verifies the whole view list round-trips with
 * the rename applied.
 */
@Test
@Transactional
@Rollback(true)
public void testSetStudyViewsUpdateKey() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> views = studyRepository.getStudyViews(study);
    Assert.assertNotNull(views);
    Assert.assertEquals(3, views.size());

    // Swap out the last view for a renamed copy sharing the same key.
    View removed = views.remove(2);
    View renamed = new View();
    renamed.setId(removed.getId());
    renamed.setStudyId(removed.getStudyId());
    renamed.setOptions(removed.getOptions());
    renamed.setName("testView");
    renamed.setDescription("Test View");
    views.add(renamed);
    Assert.assertEquals(3, views.size());

    studyRepository.setStudyViews(study, views);

    List<View> reread = studyRepository.getStudyViews(study);
    Assert.assertEquals(reread.size(), views.size());
    for (int index = 0; index < views.size(); index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(views.get(index), reread.get(index)));
    }
}
/**
 * Writes back only the first two views and verifies the third was
 * deleted while the survivors are unchanged.
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteStudyView() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> before = studyRepository.getStudyViews(study);
    Assert.assertNotNull(before);
    Assert.assertEquals(3, before.size());
    // Truncate to the first two views.
    studyRepository.setStudyViews(study, before.subList(0, 2));
    List<View> after = studyRepository.getStudyViews(study);
    Assert.assertEquals(2, after.size());
    for (int index = 0; index < 2; index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(before.get(index), after.get(index)));
    }
}
/**
 * Keeps two views, appends a brand-new view, and verifies the write: the
 * survivors are unchanged and the new view round-trips once its generated
 * id is ignored.
 */
@Test
@Transactional
@Rollback(true)
public void testAddStudyViews() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    List<View> original = studyRepository.getStudyViews(study);
    Assert.assertNotNull(original);
    Assert.assertEquals(3, original.size());

    View added = new View();
    added.setName("test");
    added.setDescription("First test attribute");

    List<View> replacement = original.subList(0, 2);
    replacement.add(added);
    studyRepository.setStudyViews(study, replacement);

    List<View> reread = studyRepository.getStudyViews(study);
    Assert.assertEquals(3, reread.size());
    for (int index = 0; index < 2; index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(original.get(index), reread.get(index)));
    }
    View rereadAdded = reread.get(2);
    // Null the generated id so the remaining fields compare reflectively.
    rereadAdded.setId(null);
    Assert.assertTrue(EqualsBuilder.reflectionEquals(added, rereadAdded));
}
/**
 * Round-trip test: writes the "track" view's attribute list back
 * unchanged and verifies a second read returns identical data.
 */
@Test
@Transactional
@Rollback(true)
public void testSetViewAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> before = studyRepository.getViewAttributes(study, view);
    Assert.assertNotNull(before);
    Assert.assertEquals(15, before.size());
    // Write the identical list back, then re-read it.
    studyRepository.setViewAttributes(study, view, before);
    List<ViewAttributes> after = studyRepository.getViewAttributes(study, view);
    Assert.assertEquals(after.size(), before.size());
    for (int index = 0; index < before.size(); index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(before.get(index), after.get(index)));
    }
}
/**
 * Writes back only the first ten view attributes and verifies the rest
 * were deleted while the ten survivors are unchanged.
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteViewAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> before = studyRepository.getViewAttributes(study, view);
    Assert.assertNotNull(before);
    Assert.assertEquals(15, before.size());
    // Truncate to the first ten attributes.
    studyRepository.setViewAttributes(study, view, before.subList(0, 10));
    List<ViewAttributes> after = studyRepository.getViewAttributes(study, view);
    Assert.assertEquals(10, after.size());
    for (int index = 0; index < 10; index++) {
        Assert.assertTrue(EqualsBuilder.reflectionEquals(before.get(index), after.get(index)));
    }
}
/**
 * Keeps ten view attributes, appends an existing study attribute (id 8,
 * "specimenNo") to the view, and verifies the write: the ten survivors are
 * unchanged and the appended attribute round-trips.
 */
@Test
@Transactional
@Rollback(true)
public void testAddViewAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
    Assert.assertNotNull(list);
    Assert.assertEquals(15, list.size());
    ViewAttributes att1 = new ViewAttributes();
    att1.setId(8);
    att1.setName("specimenNo");
    att1.setType("string");
    att1.setLabel("Specimen #");
    att1.setStudyId(study.getId());
    List<ViewAttributes> modified = list.subList(0, 10);
    modified.add(att1);
    studyRepository.setViewAttributes(study, view, modified);
    List<ViewAttributes> listAgain = studyRepository.getViewAttributes(study, view);
    Assert.assertEquals(11, listAgain.size());
    for(int i = 0; i < 10; i++) {
        Attributes oldAttribute = list.get(i);
        Attributes newAttribute = listAgain.get(i);
        Assert.assertTrue(EqualsBuilder.reflectionEquals(oldAttribute, newAttribute));
    }
    Attributes loadedAtt1 = listAgain.get(10);
    // Copy the id and rank from the original onto the loaded copy so the
    // remaining fields can be compared reflectively (the stored attribute
    // gets its own rank on write).
    loadedAtt1.setId(att1.getId());
    loadedAtt1.setRank(att1.getRank());
    Assert.assertTrue(EqualsBuilder.reflectionEquals(att1, loadedAtt1));
}
/**
 * Appending a view attribute whose id (600) does not exist in the study
 * should make setViewAttributes throw a NotFoundException whose message
 * mentions "Missing attribute".
 */
@Test
@Transactional
@Rollback(true)
public void testAddMissingViewAttributes() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    List<ViewAttributes> list = studyRepository.getViewAttributes(study, view);
    Assert.assertNotNull(list);
    Assert.assertEquals(15, list.size());
    // Deliberately bogus attribute id that is not present in the study.
    ViewAttributes att1 = new ViewAttributes();
    att1.setId(600);
    att1.setName("unknown");
    att1.setType("string");
    att1.setLabel("Specimen #");
    att1.setStudyId(study.getId());
    List<ViewAttributes> modified = list.subList(0, 10);
    modified.add(att1);
    // The ExpectedException rule must be armed before the failing call.
    thrown.expect(NotFoundException.class);
    thrown.expectMessage(containsString("Missing attribute"));
    studyRepository.setViewAttributes(study, view, modified);
}
/**
 * Creates a new case in the study and checks that the generated
 * identifier can be used to retrieve the case again.
 */
@Test
@Transactional
@Rollback(true)
public void testNewCase() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    Cases created = studyRepository.newStudyCase(study, "test");
    Assert.assertNotNull(created);
    Assert.assertNotNull(created.getId());
    Assert.assertNotNull(created.getStudyId());
    // Re-read by the generated id to prove the identifier is usable.
    Cases reread = studyRepository.getStudyCase(study, created.getId());
    Assert.assertNotNull(reread);
    Assert.assertEquals(created.getId(), reread.getId());
}
/**
 * Creates a new case positioned relative to an existing case (id 10): the
 * new case takes over the existing case's original order value, and the
 * existing case is shifted to a strictly later order.
 */
@Test
@Transactional
@Rollback(true)
public void testNewCaseOrdering() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    Cases foundCase = studyRepository.getStudyCase(study, 10);
    // Capture the displaced case's order before the insertion shifts it.
    Integer foundCaseOrder = foundCase.getOrder();
    Cases newCase = studyRepository.newStudyCase(study, "test", foundCase);
    Assert.assertNotNull(newCase);
    Assert.assertNotNull(newCase.getId());
    Assert.assertNotNull(newCase.getStudyId());
    // And now let's dig out the new case -- mainly to check that we can actually
    // follow this identifier.
    Cases caseValue = studyRepository.getStudyCase(study, newCase.getId());
    Assert.assertNotNull(caseValue);
    Assert.assertEquals(newCase.getId(), caseValue.getId());
    // The new case inherits the displaced case's original order value.
    Assert.assertEquals(foundCaseOrder, caseValue.getOrder());
    // And check we've bumped the order: the re-read existing case must now
    // sort after the new case, and differ from its own pre-insert order.
    Cases refoundCase = studyRepository.getStudyCase(study, foundCase.getId());
    Assert.assertThat(caseValue.getOrder(), Matchers.lessThan(refoundCase.getOrder()));
    Assert.assertThat(foundCase.getOrder(), Matchers.not(refoundCase.getOrder()));
}
/**
 * Forces newStudyCase to fail by swapping in a mocked QueryDslJdbcTemplate
 * whose insertWithKey stub returns null (i.e. no generated key), and
 * expects an InvalidValueException. The real template is restored in a
 * finally block so later tests are unaffected.
 */
@Test
@Transactional
@Rollback(true)
public void testFailingNewCase() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    QueryDslJdbcTemplate mockTemplate = createMock(QueryDslJdbcTemplate.class);
    // Query construction is delegated to the real template; execution
    // methods are stubbed. insertWithKey returning null triggers the error.
    expect(mockTemplate.newSqlQuery()).andStubReturn(studyRepository.getTemplate().newSqlQuery());
    expect(mockTemplate.queryForObject(anyObject(SQLQuery.class), (Expression<?>) anyObject(Expression.class))).andStubReturn(null);
    expect(mockTemplate.update(eq(QCases.cases), anyObject(SqlUpdateCallback.class))).andStubReturn(new Long(1));
    expect(mockTemplate.insertWithKey((RelationalPath<?>) anyObject(RelationalPath.class), (SqlInsertWithKeyCallback<?>) anyObject(SqlInsertWithKeyCallback.class))).andStubReturn(null);
    replay(mockTemplate);
    // Arm the expected-exception rule before making the failing call.
    thrown.expect(InvalidValueException.class);
    thrown.expectMessage(containsString("Can't create new case"));
    QueryDslJdbcTemplate originalTemplate = studyRepository.getTemplate();
    studyRepository.setTemplate(mockTemplate);
    try {
        studyRepository.newStudyCase(study, "test");
    } finally {
        // Always restore the real template, even though the call throws.
        studyRepository.setTemplate(originalTemplate);
    }
}
/**
 * Retrieves a single named study attribute and checks its name and label.
 */
@Test
@Transactional
@Rollback(true)
public void testGetStudyAttribute() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    Attributes patientIdAttribute = studyRepository.getStudyAttribute(study, "patientId");
    Assert.assertEquals("patientId", patientIdAttribute.getName());
    Assert.assertEquals("Patient ID", patientIdAttribute.getLabel());
}
/**
 * Sets a case state without a manager involved and verifies the audit log
 * records the transition: old state null, new state "pending", attributed
 * to the acting user.
 */
@Test
@Transactional
@Rollback(true)
public void testNewCaseWithoutManager() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    Cases caseValue = studyRepository.getStudyCase(study, 7);
    studyRepository.setStudyCaseState(study, caseValue, "morag", "pending");
    // Check we now have an audit log entry
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertNotNull(auditEntries);
    Assert.assertEquals(1, auditEntries.size());
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("morag", entry.get("eventUser").asText());
    Assert.assertTrue(entry.get("eventArgs").get("old_state").isNull());
    Assert.assertEquals("pending", entry.get("eventArgs").get("state").asText());
    // Check a re-read gets the new state
    // NOTE(review): the state was set on case 7 above, but this re-reads
    // case 15 — looks like a copy/paste slip; confirm whether case 7 was
    // intended (the assertion may pass for an unrelated reason).
    Cases foundValue = studyRepository.getStudyCase(study, 15);
    Assert.assertEquals("pending", foundValue.getState());
}
/**
 * Filtering on an exact patientId string should return exactly the one
 * matching case.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFiltering() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    // Build an exact-match filter on patientId.
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("patientId", jsonNodeFactory.textNode("DEMO-02"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("patientId"));
    Assert.assertEquals("DEMO-02", row.get("patientId").asText());
}
/**
 * An exact-match filter for a value present in no case should return an
 * empty (but non-null) result list.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringMiss() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("patientId", jsonNodeFactory.textNode("MISSING"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(0, rows.size());
}
/**
 * Filtering on a quoted empty string must match blanks, which may be
 * stored either as NULL or as an empty string; the generated query has to
 * cover both representations.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringBlank() {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
    ObjectNode filter = jsonNodeFactory.objectNode();
    // The filter value "\"\"" (a quoted empty string) selects blank mrn
    // values, whether stored as NULL or as "".
    filter.replace("mrn", jsonNodeFactory.textNode("\"\""));
    StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
    List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
    Assert.assertNotNull(dataList);
    Assert.assertEquals(15, dataList.size());
    JsonNode data = dataList.get(0);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("patientId"));
    Assert.assertEquals("DEMO-05", data.get("patientId").asText());
}
/**
 * Filtering on "N/A" must check the not-available marker rather than the
 * ordinary value field; exactly one case should match.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringNA() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("tissueSite", jsonNodeFactory.textNode("N/A"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("patientId"));
    Assert.assertEquals("DEMO-06", row.get("patientId").asText());
}
/**
 * A leading wildcard ("*-05") should match by suffix, returning the
 * single case whose patientId ends in "-05".
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringWildcardPrefix() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("patientId", jsonNodeFactory.textNode("*-05"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("patientId"));
    Assert.assertEquals("DEMO-05", row.get("patientId").asText());
}
/**
 * A trailing wildcard ("DEMO-0*") should match by prefix, returning the
 * ten cases whose patientId starts with "DEMO-0".
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringWildcardSuffix() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("patientId", jsonNodeFactory.textNode("DEMO-0*"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(10, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("patientId"));
    Assert.assertEquals("DEMO-01", row.get("patientId").asText());
}
/**
 * A filter value may be an expression: "N/A OR *lung*" combines the
 * not-available marker with a wildcard match, returning the two cases
 * that satisfy either side.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicFilteringExpression() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
    ObjectNode filter = jsonNodeFactory.objectNode();
    // Expression containing a space: either N/A or a tissueSite matching *lung*.
    filter.replace("tissueSite", jsonNodeFactory.textNode("N/A OR *lung*"));
    StudyCaseQuery filteredQuery = studyRepository.addStudyCaseFilterSelector(query, filter);
    List<ObjectNode> dataList = studyRepository.getCaseData(filteredQuery, view);
    Assert.assertNotNull(dataList);
    Assert.assertEquals(2, dataList.size());
    JsonNode data = dataList.get(0);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("patientId"));
    Assert.assertEquals("DEMO-03", data.get("patientId").asText());
    data = dataList.get(1);
    Assert.assertNotNull(data);
    Assert.assertTrue(data.has("patientId"));
    Assert.assertEquals("DEMO-06", data.get("patientId").asText());
}
/**
 * Boolean attributes can be filtered with "Yes"/"No" strings; filtering
 * specimenAvailable on "No" should return three cases.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicBooleanFilteringExpression() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("specimenAvailable", jsonNodeFactory.textNode("No"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(3, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("patientId"));
    Assert.assertEquals("DEMO-02", row.get("patientId").asText());
}
/**
 * Date attributes can be filtered with an ISO date string; filtering
 * consentDate on 2014-08-18 should return exactly one case.
 */
@Test
@Transactional
@Rollback(true)
public void testBasicDateFilteringExpression() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("consentDate", jsonNodeFactory.textNode("2014-08-18"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("patientId"));
    Assert.assertEquals("DEMO-02", row.get("patientId").asText());
}
/**
 * Two filter fields combine conjunctively: an exact patientId plus a
 * sampleAvailable value should select a single case in the "complete"
 * view, identified here by its specimen number.
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFiltering1() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("patientId", jsonNodeFactory.textNode("DEMO-03"));
    filter.replace("sampleAvailable", jsonNodeFactory.textNode("LMP"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(1, rows.size());
    JsonNode row = rows.get(0);
    Assert.assertNotNull(row);
    Assert.assertTrue(row.has("specimenNo"));
    Assert.assertEquals("S12-3000", row.get("specimenNo").asText());
}
/**
 * Combining a blank-string filter with a wildcard filter must not break
 * the generated query; the empty physician filter is effectively a
 * no-op, leaving the ten wildcard matches.
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFilteringBlankRegressionDate() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("physician", jsonNodeFactory.textNode(""));
    filter.replace("patientId", jsonNodeFactory.textNode("DEMO-0*"));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(10, rows.size());
}
/**
 * Regression for #101 — filtering by a blank string on date (and other
 * non-string) attributes used to fail. An empty sampleAvailable filter
 * plus a quoted-blank consentDate filter should return 15 cases without
 * error.
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFilteringBlankRegressionBoolean() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("sampleAvailable", jsonNodeFactory.textNode(""));
    filter.replace("consentDate", jsonNodeFactory.textNode("\"\""));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(15, rows.size());
}
/**
 * Regression for #101 — filtering by a blank string on boolean
 * attributes. An empty sampleAvailable filter plus a quoted-blank
 * specimenAvailable filter should return 16 cases without error.
 */
@Test
@Transactional
@Rollback(true)
public void testMultipleFiltering3() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "complete");
    ObjectNode filter = jsonNodeFactory.objectNode();
    filter.replace("sampleAvailable", jsonNodeFactory.textNode(""));
    filter.replace("specimenAvailable", jsonNodeFactory.textNode("\"\""));
    StudyCaseQuery filtered = studyRepository.addStudyCaseFilterSelector(
            studyRepository.newStudyCaseQuery(study), filter);
    List<ObjectNode> rows = studyRepository.getCaseData(filtered, view);
    Assert.assertNotNull(rows);
    Assert.assertEquals(16, rows.size());
}
/**
 * Deletes case 1 through a case query, verifies it can no longer be
 * retrieved, and checks that a "delete" audit entry was written carrying
 * the deleted case's data (patientId DEMO-01).
 */
@Test
@Transactional
@Rollback(true)
public void testDeleteCase() throws RepositoryException {
    Study study = studyRepository.getStudy("DEMO");
    View view = studyRepository.getStudyView(study, "track");
    // First check the data exists
    StudyCaseQuery query = studyRepository.newStudyCaseQuery(study);
    query = studyRepository.addStudyCaseSelector(query, 1);
    List<ObjectNode> data = studyRepository.getCaseData(query, view);
    Assert.assertNotNull(data);
    Assert.assertEquals(1, data.size());
    // Build a fresh query for the deletion itself.
    query = studyRepository.newStudyCaseQuery(study);
    query = studyRepository.addStudyCaseSelector(query, 1);
    // Try the deletion
    studyRepository.deleteCases(query, "morag");
    // Now generate the query again, and confirm we can't find it
    query = studyRepository.newStudyCaseQuery(study);
    query = studyRepository.addStudyCaseSelector(query, 1);
    data = studyRepository.getCaseData(query, view);
    Assert.assertNotNull(data);
    Assert.assertEquals(0, data.size());
    // The deletion should have produced a single audit entry of type
    // "delete" that snapshots the removed case's data.
    CasePager pager = new CasePager();
    pager.setOffset(0);
    pager.setLimit(5);
    List<JsonNode> auditEntries = auditLogRepository.getAuditData(study, pager);
    Assert.assertEquals(1, auditEntries.size());
    JsonNode entry = auditEntries.get(0);
    Assert.assertEquals("delete", entry.get("eventType").asText());
    JsonNode entryData = entry.get("eventArgs").get("data");
    Assert.assertEquals("DEMO-01", entryData.get("patientId").asText());
}
}
| Added a test for a value matcher with a space in. Resolves #122
| src/test/java/ca/uhnresearch/pughlab/tracker/dao/impl/StudyRepositoryImplTest.java | Added a test for a value matcher with a space in. Resolves #122 |
|
Java | bsd-3-clause | 947a3b91875c798061bd1ca3e39433936a57134c | 0 | uzen/byteseek | /*
* Copyright Matt Palmer 2009-2012, All rights reserved.
*
* This code is licensed under a standard 3-clause BSD license:
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * The names of its contributors may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package net.domesdaybook.compiler.matcher;
import java.util.ArrayList;
import java.util.List;
import net.domesdaybook.compiler.AbstractCompiler;
import net.domesdaybook.compiler.CompileException;
import net.domesdaybook.matcher.bytes.ByteMatcherFactory;
import net.domesdaybook.matcher.bytes.SetAnalysisByteMatcherFactory;
import net.domesdaybook.matcher.sequence.ByteArrayMatcher;
import net.domesdaybook.matcher.sequence.BytesToSequencesMatcherFactory;
import net.domesdaybook.matcher.sequence.CaseInsensitiveSequenceMatcher;
import net.domesdaybook.matcher.sequence.SequenceMatcher;
import net.domesdaybook.matcher.sequence.SequenceMatcherFactory;
import net.domesdaybook.parser.ParseException;
import net.domesdaybook.parser.Parser;
import net.domesdaybook.parser.regex.RegexParser;
import net.domesdaybook.parser.tree.ParseTree;
import net.domesdaybook.parser.tree.ParseTreeType;
import net.domesdaybook.parser.tree.ParseTreeUtils;
import net.domesdaybook.parser.tree.node.StructuralNode;
/**
* A compiler which produces a {@link SequenceMatcher} from an
* abstract syntax tree provided by a {@link Parser}.
* <p>
* It can handle nearly all the syntax processable by the {@link net.domesdaybook.parser.regex.RegexParser},
* but it cannot handle any syntax which would give variable lengths to
* match, or which would have alternative sequences of bytes,
* as a sequence matcher can only match a single defined sequence.
* <p>
* This means that it *cannot* handle alternatives (X|Y|Z),
* optionality X?, variable length repeats {n-m},
* and the wildcard repeats * and +. It can handle fixed length repeats {n}.
*
* @author Matt Palmer
*/
public class SequenceMatcherCompiler extends AbstractCompiler<SequenceMatcher, ParseTree> {

    // Protected constants:

    /** Inversion status used when building structural nodes; sequences are built non-inverted. */
    protected static final boolean NOT_YET_INVERTED = false;

    /** Compiler lazily created and cached by {@link #compileFrom(String)}. */
    protected static SequenceMatcherCompiler defaultCompiler;

    /**
     * Compiles a SequenceMatcher from a byteSeek regular expression (limited to
     * syntax which produces fixed-length sequences). It will use the default
     * {@link SetAnalysisByteMatcherFactory} to produce matchers for sets of bytes.
     *
     * @param expression The regular expression to compile
     * @return SequenceMatcher a SequenceMatcher matching the regular expression.
     * @throws CompileException If the expression could not be compiled into a SequenceMatcher.
     */
    public static SequenceMatcher compileFrom(final String expression) throws CompileException {
        // Lazily initialise the cached compiler: the previous code constructed a
        // new SequenceMatcherCompiler on every call, which made the static cache
        // field pointless.
        if (defaultCompiler == null) {
            defaultCompiler = new SequenceMatcherCompiler();
        }
        return defaultCompiler.compile(expression);
    }

    /** The factory used to produce {@link net.domesdaybook.matcher.bytes.ByteMatcher}s for sets of bytes. */
    protected final ByteMatcherFactory byteMatcherFactory;

    /** The factory used to produce a single SequenceMatcher from a list of sequence matchers. */
    protected final SequenceMatcherFactory sequenceMatcherFactory;

    /**
     * Default constructor which uses the {@link SetAnalysisByteMatcherFactory}
     * to produce matchers for sets of bytes, and the parser defined in
     * AbstractCompiler to produce the abstract syntax tree.
     */
    public SequenceMatcherCompiler() {
        this(null, null);
    }

    /**
     * Constructor which uses the provided {@link ByteMatcherFactory} to
     * produce matchers for sets of bytes, and the parser defined in
     * AbstractCompiler to produce the abstract syntax tree.
     *
     * @param factoryToUse The ByteMatcherFactory used to produce matchers
     *                     for sets of bytes.
     */
    public SequenceMatcherCompiler(final ByteMatcherFactory factoryToUse) {
        this(null, factoryToUse);
    }

    /**
     * Constructor which uses the provided {@link Parser} to produce the abstract
     * syntax tree, and the default {@link SetAnalysisByteMatcherFactory} to build
     * the byte matchers.
     *
     * @param parser The parser to use to produce the abstract syntax tree.
     */
    public SequenceMatcherCompiler(final Parser<ParseTree> parser) {
        this(parser, null);
    }

    /**
     * Constructor which uses the provided {@link ByteMatcherFactory} to
     * produce matchers for sets of bytes, and the provided {@link Parser} to
     * produce the abstract syntax tree.
     * <p>
     * If the parser is null, then the parser used will be the default parser defined
     * in {@link AbstractCompiler}. If the factory is null, then the default
     * {@link SetAnalysisByteMatcherFactory} will be used.
     *
     * @param parser The parser to use to produce the abstract syntax tree.
     * @param byteFactoryToUse The ByteMatcherFactory used to produce matchers
     *                         for sets of bytes.
     */
    public SequenceMatcherCompiler(final Parser<ParseTree> parser,
                                   final ByteMatcherFactory byteFactoryToUse) {
        this(parser, byteFactoryToUse, null);
    }

    //TODO: add constructors which take a sequenceMatcherFactory as a parameter.

    /**
     * Constructor which uses the provided {@link ByteMatcherFactory} to
     * produce matchers for sets of bytes, the provided {@link SequenceMatcherFactory}
     * to assemble the final sequence, and the provided {@link Parser} to
     * produce the abstract syntax tree.
     * <p>
     * If the parser is null, then the parser used will be the default parser defined
     * in {@link AbstractCompiler}. If either factory is null, then the defaults
     * ({@link SetAnalysisByteMatcherFactory} and
     * {@link BytesToSequencesMatcherFactory}) will be used.
     *
     * @param parser The parser to use to produce the abstract syntax tree.
     * @param byteFactoryToUse The ByteMatcherFactory used to produce matchers
     *                         for sets of bytes.
     * @param sequenceFactoryToUse The SequenceMatcherFactory used to produce a
     *                             single SequenceMatcher from a list of them.
     */
    public SequenceMatcherCompiler(final Parser<ParseTree> parser,
                                   final ByteMatcherFactory byteFactoryToUse,
                                   final SequenceMatcherFactory sequenceFactoryToUse) {
        super(parser == null? new RegexParser() : parser);
        byteMatcherFactory = byteFactoryToUse != null?
                             byteFactoryToUse : new SetAnalysisByteMatcherFactory();
        sequenceMatcherFactory = sequenceFactoryToUse != null?
                                 sequenceFactoryToUse : new BytesToSequencesMatcherFactory();
    }

    /**
     * Builds the ParseTree node into a list of sequence matchers.
     * Then it uses a SequenceMatcherFactory to produce a single SequenceMatcher from the list.
     *
     * @param ast The abstract syntax tree to compile.
     * @return A SequenceMatcher representing the expression.
     * @throws ParseException If the ast could not be parsed.
     */
    protected SequenceMatcher doCompile(final ParseTree ast) throws ParseException {
        final List<SequenceMatcher> sequenceList = buildSequenceList(ast, new ArrayList<SequenceMatcher>());
        return sequenceMatcherFactory.create(sequenceList);
    }

    /**
     * Parses the ParseTree node passed in, building a list of sequence matchers from it.
     *
     * @param matcherTree The abstract syntax tree to parse.
     * @param sequenceList A sequence matcher list to append to.
     * @return A list of sequence matchers in the order specified by the ParseTree.
     * @throws ParseException If there is a problem parsing the parse tree.
     * @throws NullPointerException if the parse tree or sequence list are null.
     */
    protected List<SequenceMatcher> buildSequenceList(final ParseTree matcherTree,
                                                      final List<SequenceMatcher> sequenceList)
        throws ParseException {
        switch (matcherTree.getParseTreeType()) {
            case BYTE:                    addByteMatcher(                  matcherTree, sequenceList); break;
            case ANY:                     addAnyMatcher(                   matcherTree, sequenceList); break;
            case ALL_BITMASK:             addAllBitmaskMatcher(            matcherTree, sequenceList); break;
            case ANY_BITMASK:             addAnyBitmaskMatcher(            matcherTree, sequenceList); break;
            case RANGE:                   addRangeMatcher(                 matcherTree, sequenceList); break;
            case CASE_SENSITIVE_STRING:   addStringMatcher(                matcherTree, sequenceList); break;
            case CASE_INSENSITIVE_STRING: addCaseInsensitiveStringMatcher( matcherTree, sequenceList); break;
            case SEQUENCE:                addSequenceMatcher(              matcherTree, sequenceList); break;
            case REPEAT:                  addRepeatedSequence(             matcherTree, sequenceList); break;
            case SET: case ALTERNATIVES:  addSetMatcher(                   matcherTree, sequenceList); break;

            default: throw new ParseException(getTypeErrorMessage(matcherTree));
        }
        return sequenceList;
    }

    /**
     * Compiles the node to repeat, then appends it to the sequence list the
     * fixed number of times given by the first repeat value.
     *
     * @param ast The REPEAT node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the repeat values or repeated node cannot be parsed.
     */
    private void addRepeatedSequence(final ParseTree ast,
                                     final List<SequenceMatcher> sequenceList)
        throws ParseException {
        final int timesToRepeat = ParseTreeUtils.getFirstRepeatValue(ast);
        final SequenceMatcher sequenceToRepeat = doCompile(ParseTreeUtils.getNodeToRepeat(ast));
        // The same (immutable) matcher instance is appended repeatedly.
        for (int count = 1; count <= timesToRepeat; count++) {
            sequenceList.add(sequenceToRepeat);
        }
    }

    /**
     * Recursively builds each child of a SEQUENCE node into the sequence list.
     *
     * @param ast The SEQUENCE node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If a child node cannot be parsed.
     */
    private void addSequenceMatcher(final ParseTree ast,
                                    final List<SequenceMatcher> sequenceList)
        throws ParseException {
        for (final ParseTree child : ast.getChildren()) {
            buildSequenceList(child, sequenceList);
        }
    }

    /**
     * Appends a case insensitive string matcher for the node's text value.
     *
     * @param ast The CASE_INSENSITIVE_STRING node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the node has no text value.
     */
    private void addCaseInsensitiveStringMatcher(final ParseTree ast,
                                                 final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(new CaseInsensitiveSequenceMatcher(ast.getTextValue()));
    }

    /**
     * Appends a byte array matcher for the node's (case sensitive) text value.
     *
     * @param ast The CASE_SENSITIVE_STRING node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the node has no text value.
     */
    private void addStringMatcher(final ParseTree ast,
                                  final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(new ByteArrayMatcher(ast.getTextValue()));
    }

    /**
     * Appends a matcher for a set of bytes, built by the byte matcher factory.
     *
     * @param ast The SET or ALTERNATIVES node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the set of bytes cannot be determined from the node.
     */
    private void addSetMatcher(final ParseTree ast,
                               final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(CompilerUtils.createMatcherFromSet(ast, byteMatcherFactory));
    }

    /**
     * Appends a matcher for a range of byte values.
     *
     * @param ast The RANGE node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the range values cannot be parsed from the node.
     */
    private void addRangeMatcher(final ParseTree ast,
                                 final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(CompilerUtils.createRangeMatcher(ast));
    }

    /**
     * Appends a matcher which matches bytes sharing any bit of a bitmask.
     *
     * @param ast The ANY_BITMASK node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the bitmask value cannot be parsed from the node.
     */
    private void addAnyBitmaskMatcher(final ParseTree ast,
                                      final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(CompilerUtils.createAnyBitmaskMatcher(ast));
    }

    /**
     * Appends a matcher which matches bytes sharing all bits of a bitmask.
     *
     * @param ast The ALL_BITMASK node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the bitmask value cannot be parsed from the node.
     */
    private void addAllBitmaskMatcher(final ParseTree ast,
                                      final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(CompilerUtils.createAllBitmaskMatcher(ast));
    }

    /**
     * Appends a matcher which matches any byte value.
     *
     * @param ast The ANY node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the any matcher cannot be created from the node.
     */
    private void addAnyMatcher(final ParseTree ast,
                               final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(CompilerUtils.createAnyMatcher(ast));
    }

    /**
     * Appends a matcher for a single byte value.
     *
     * @param ast The BYTE node to process.
     * @param sequenceList The list of sequence matchers to append to.
     * @throws ParseException If the byte value cannot be parsed from the node.
     */
    private void addByteMatcher(final ParseTree ast,
                                final List<SequenceMatcher> sequenceList)
        throws ParseException {
        sequenceList.add(CompilerUtils.createByteMatcher(ast));
    }

    /**
     * Joins separately compiled expressions into a single (non-inverted) SEQUENCE node.
     *
     * @param expressions The parse trees to join.
     * @return A SEQUENCE node with the expressions as its children.
     */
    @Override
    protected ParseTree joinExpressions(List<ParseTree> expressions) throws ParseException, CompileException {
        return new StructuralNode(ParseTreeType.SEQUENCE, expressions, NOT_YET_INVERTED);
    }

    /**
     * Builds an error message describing an unexpected parse tree node type.
     *
     * @param ast The node with the unexpected type.
     * @return A message describing the node's type.
     */
    private String getTypeErrorMessage(final ParseTree ast) {
        final ParseTreeType type = ast.getParseTreeType();
        // Bug fix: the original format string used %d for the ParseTreeType enum,
        // which throws IllegalFormatConversionException at runtime instead of
        // producing the intended message.  Enums must be formatted with %s.
        return String.format("Unknown type %s with description: %s",
                             type, type.getDescription());
    }

}
| src/net/domesdaybook/compiler/matcher/SequenceMatcherCompiler.java | /*
* Copyright Matt Palmer 2009-2012, All rights reserved.
*
* This code is licensed under a standard 3-clause BSD license:
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * The names of its contributors may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package net.domesdaybook.compiler.matcher;
import java.util.ArrayList;
import java.util.List;
import net.domesdaybook.compiler.AbstractCompiler;
import net.domesdaybook.compiler.CompileException;
import net.domesdaybook.matcher.bytes.ByteMatcherFactory;
import net.domesdaybook.matcher.bytes.SetAnalysisByteMatcherFactory;
import net.domesdaybook.matcher.sequence.ByteArrayMatcher;
import net.domesdaybook.matcher.sequence.BytesToSequencesMatcherFactory;
import net.domesdaybook.matcher.sequence.CaseInsensitiveSequenceMatcher;
import net.domesdaybook.matcher.sequence.SequenceMatcher;
import net.domesdaybook.matcher.sequence.SequenceMatcherFactory;
import net.domesdaybook.parser.ParseException;
import net.domesdaybook.parser.Parser;
import net.domesdaybook.parser.regex.RegexParser;
import net.domesdaybook.parser.tree.ParseTree;
import net.domesdaybook.parser.tree.ParseTreeType;
import net.domesdaybook.parser.tree.ParseTreeUtils;
import net.domesdaybook.parser.tree.node.StructuralNode;
/**
* A compiler which produces a {@link SequenceMatcher} from an
* abstract syntax tree provided by a {@link Parser}.
* <p>
* It can handle nearly all the syntax processable by the {@link net.domesdaybook.parser.regex.RegexParser},
* but it cannot handle any syntax which would give variable lengths to
* match, or which would have alternative sequences of bytes,
* as a sequence matcher can only match a single defined sequence.
* <p>
* This means that it *cannot* handle alternatives (X|Y|Z),
* optionality X?, variable length repeats {n-m},
* and the wildcard repeats * and +. It can handle fixed length repeats {n}.
*
* @author Matt Palmer
*/
public class SequenceMatcherCompiler extends AbstractCompiler<SequenceMatcher, ParseTree> {
// Protected constants:
protected static final boolean NOT_YET_INVERTED = false;
protected static SequenceMatcherCompiler defaultCompiler;
/**
* Compiles a SequenceMatcher from a byteSeek regular expression (limited to
* syntax which produces fixed-length sequences). It will use the default
* {@link SetAnalysisByteMatcherFactory} to produce matchers for sets of bytes.
*
* @param expression The regular expression to compile
* @return SequenceMatcher a SequenceMatcher matching the regular expression.
* @throws CompileException If the expression could not be compiled into a SequenceMatcher.
*/
public static SequenceMatcher compileFrom(final String expression) throws CompileException {
defaultCompiler = new SequenceMatcherCompiler();
return defaultCompiler.compile(expression);
}
protected final ByteMatcherFactory byteMatcherFactory;
protected final SequenceMatcherFactory sequenceMatcherFactory;
/**
* Default constructor which uses the {@link SetAnalysisByteMatcherFactory}
* to produce matchers for sets of bytes, and the parser defined in
* AbstractCompiler to produce the abstract syntax tree.
*
*/
public SequenceMatcherCompiler() {
this(null, null);
}
/**
* Constructor which uses the provided {@link ByteMatcherFactory} to
* produce matchers for sets of bytes, and the parser defined in
* AbstractCompiler to produce the abstract syntax tree.
*
* @param factoryToUse The ByteMatcherFactory used to produce matchers
* for sets of bytes.
*/
public SequenceMatcherCompiler(final ByteMatcherFactory factoryToUse) {
this(null, factoryToUse);
}
/**
* Constructor which uses the provided {@link Parser} to produce the abstract
* syntax tree, and the default {@SimpleByteMatcherFactory} to build the byte
* matchers.
*
* @param parser The parser to use to produce the abstract syntax tree.
*/
public SequenceMatcherCompiler(final Parser<ParseTree> parser) {
this(parser, null);
}
/**
* Constructor which uses the provided {@link ByteMatcherFactory} to
* produce matchers for sets of bytes, and the provided {@link Parser} to
* product the abstract syntax tree.
* <p>
* If the parser is null, then the parser used will be the default parser defined
* in {@link AbstractCompiler}. If the factory is null, then the default
* {@link SetAnalysisByteMatcherFactory} will be used.
*
* @param parser The parser to use to produce the abstract syntax tree.
* @param factoryToUse The ByteMatcherFactory used to produce matchers
* for sets of bytes.
*/
public SequenceMatcherCompiler(final Parser<ParseTree> parser,
final ByteMatcherFactory byteFactoryToUse) {
this(parser, byteFactoryToUse, null);
}
//TODO: add constructors which take a sequenceMatcherFactory as a parameter.
/**
* Constructor which uses the provided {@link ByteMatcherFactory} to
* produce matchers for sets of bytes, and the provided {@link Parser} to
* product the abstract syntax tree.
* <p>
* If the parser is null, then the parser used will be the default parser defined
* in {@link AbstractCompiler}. If the factory is null, then the default
* {@link SetAnalysisByteMatcherFactory} will be used.
*
* @param parser The parser to use to produce the abstract syntax tree.
* @param factoryToUse The ByteMatcherFactory used to produce matchers
* for sets of bytes.
*/
public SequenceMatcherCompiler(final Parser<ParseTree> parser,
final ByteMatcherFactory byteFactoryToUse,
final SequenceMatcherFactory sequenceFactoryToUse) {
super(parser == null? new RegexParser() : parser);
byteMatcherFactory = byteFactoryToUse != null?
byteFactoryToUse : new SetAnalysisByteMatcherFactory();
sequenceMatcherFactory = sequenceFactoryToUse != null?
sequenceFactoryToUse : new BytesToSequencesMatcherFactory();
}
/**
* Builds the ParseTree node into a list of sequence matchers.
* Then it uses a SequenceMatcherFactory to produce a single SequenceMatcher from the list.
*
* @param ast The abstract syntax tree to compile.
* @return A SequenceMatcher representing the expression.
* @throws ParseException If the ast could not be parsed.
*/
protected SequenceMatcher doCompile(final ParseTree ast) throws ParseException {
final List<SequenceMatcher> sequenceList = buildSequenceList(ast, new ArrayList<SequenceMatcher>());
return sequenceMatcherFactory.create(sequenceList);
}
/**
* Parses the ParseTree node passed in, building a list of sequence matchers from it.
*
* @param ast The abstract syntax tree to parse.
* @param sequenceList A sequence matcher list to append to.
* @return A list of sequence matchers in the order specified by the ParseTree.
* @throws ParseException If there is a problem parsing the parse tree.
* @throws NullPointerException if the parse tree or sequence list are null.
*/
protected List<SequenceMatcher> buildSequenceList(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
switch (ast.getParseTreeType()) {
case BYTE: addByteMatcher( ast, sequenceList); break;
case ANY: addAnyMatcher( ast, sequenceList); break;
case ALL_BITMASK: addAllBitmaskMatcher( ast, sequenceList); break;
case ANY_BITMASK: addAnyBitmaskMatcher( ast, sequenceList); break;
case RANGE: addRangeMatcher( ast, sequenceList); break;
case CASE_SENSITIVE_STRING: addStringMatcher( ast, sequenceList); break;
case CASE_INSENSITIVE_STRING: addCaseInsensitiveStringMatcher( ast, sequenceList); break;
case SEQUENCE: addSequenceMatcher( ast, sequenceList); break;
case REPEAT: addRepeatedSequence( ast, sequenceList); break;
case SET: case ALTERNATIVES: addSetMatcher( ast, sequenceList); break;
default: throw new ParseException(getTypeErrorMessage(ast));
}
return sequenceList;
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addRepeatedSequence(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
final int timesToRepeat = ParseTreeUtils.getFirstRepeatValue(ast);
final SequenceMatcher sequenceToRepeat = doCompile(ParseTreeUtils.getNodeToRepeat(ast));
for (int count = 1; count <= timesToRepeat; count++) {
sequenceList.add(sequenceToRepeat);
}
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addSequenceMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
for (final ParseTree child : ast.getChildren()) {
buildSequenceList(child, sequenceList);
}
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addCaseInsensitiveStringMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(new CaseInsensitiveSequenceMatcher(ast.getTextValue()));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addStringMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(new ByteArrayMatcher(ast.getTextValue()));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addSetMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(CompilerUtils.createMatcherFromSet(ast, byteMatcherFactory));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addRangeMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(CompilerUtils.createRangeMatcher(ast));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addAnyBitmaskMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(CompilerUtils.createAnyBitmaskMatcher(ast));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addAllBitmaskMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(CompilerUtils.createAllBitmaskMatcher(ast));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addAnyMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(CompilerUtils.createAnyMatcher(ast));
}
/**
* @param ast
* @param sequenceList
* @throws ParseException
*/
private void addByteMatcher(final ParseTree ast,
final List<SequenceMatcher> sequenceList)
throws ParseException {
sequenceList.add(CompilerUtils.createByteMatcher(ast));
}
    /**
     * Joins a list of parsed expressions into a single SEQUENCE structural
     * node, so that multiple expressions compile as one sequence matcher.
     * The node is created with the NOT_YET_INVERTED flag.
     *
     * @param expressions the parsed expressions to join.
     * @return a SEQUENCE node containing the given expressions.
     * @throws ParseException declared by the overridden method's contract.
     * @throws CompileException declared by the overridden method's contract.
     */
    @Override
    protected ParseTree joinExpressions(List<ParseTree> expressions) throws ParseException, CompileException {
        return new StructuralNode(ParseTreeType.SEQUENCE, expressions, NOT_YET_INVERTED);
    }
private String getTypeErrorMessage(final ParseTree ast) {
final ParseTreeType type = ast.getParseTreeType();
return String.format("Unknown type, id %d with description: %s",
type, type.getDescription());
}
/*
protected SequenceMatcher doCompileOld(final ParseTree ast) throws ParseException {
SequenceMatcher matcher = null;
switch (ast.getParseTreeType()) {
// Deals with sequences of values, where a sequence node
// has an ordered list of child nodes.
// Processing is complicated by the need to optimise the
// resulting sequences. We could just build a ByteMatcherArrayMatcher
// (consisting of an array of ByteMatchers), but this is not optimal
// if, for instance, we just have simple list of bytes, for which a
// ByteArrayMatcher would be more appropriate.
case SEQUENCE: {
final List<Byte> byteValuesToJoin = new ArrayList<Byte>();
final List<ByteMatcher> singleByteSequence = new ArrayList<ByteMatcher>();
final List<SequenceMatcher> sequences = new ArrayList<SequenceMatcher>();
for (final ParseTree child : ast.getChildren()) {
switch (child.getParseTreeType()) {
// Bytes and case sensitive strings are just byte values,
// so we join them into a list of values as we go,
// building the final matcher when we run out of bytes
// or case sensitive strings to process.
case BYTE: {
addCollectedByteMatchers(singleByteSequence, sequences);
byteValuesToJoin.add(child.getByteValue());
break;
}
case CASE_SENSITIVE_STRING: {
addCollectedByteMatchers(singleByteSequence, sequences);
final String str = child.getTextValue();
for (int charIndex = 0, end = str.length(); charIndex < end; charIndex++) {
final byte byteValue = (byte) str.charAt(charIndex);
byteValuesToJoin.add(byteValue);
}
break;
}
// bitmasks, sets and any bytes are multiple-valued byte matchers:
case ALL_BITMASK: {
addCollectedByteValues(byteValuesToJoin, sequences);
singleByteSequence.add(getAllBitmaskMatcher(child));
break;
}
case ANY_BITMASK: {
addCollectedByteValues(byteValuesToJoin, sequences);
singleByteSequence.add(getAnyBitmaskMatcher(child));
break;
}
case SET: {
final ByteMatcher bytematch = getSetMatcher(child, false);
if (bytematch.getNumberOfMatchingBytes() == 1) {
final byte[] matchingBytes = bytematch.getMatchingBytes();
addCollectedByteMatchers(singleByteSequence, sequences);
byteValuesToJoin.add(matchingBytes[0]);
} else {
addCollectedByteValues(byteValuesToJoin, sequences);
singleByteSequence.add(bytematch);
}
break;
}
case INVERTED_SET: {
final ByteMatcher bytematch = getSetMatcher(child, true);
final byte[] matchingBytes = bytematch.getMatchingBytes();
if (matchingBytes.length == 1) {
addCollectedByteMatchers(singleByteSequence, sequences);
byteValuesToJoin.add(matchingBytes[0]);
} else {
addCollectedByteValues(byteValuesToJoin, sequences);
singleByteSequence.add(bytematch);
}
break;
}
case ANY: {
addCollectedByteValues(byteValuesToJoin, sequences);
singleByteSequence.add(AnyByteMatcher.ANY_BYTE_MATCHER);
break;
}
// case insensitive strings are already sequences of values:
case CASE_INSENSITIVE_STRING: {
// Add any bytes or singlebytematchers to the sequences.
// There cannot be both bytes and singlebytematchers
// outstanding to be collected, as they both ensure
// this as they are built.
// so the order of adding them here does not matter.
addCollectedByteValues(byteValuesToJoin, sequences);
addCollectedByteMatchers(singleByteSequence, sequences);
sequences.add(getCaseInsensitiveStringMatcher(child));
break;
}
//FIXME: don't test for instances of particular classes,
// figure out what the criteria is in another way.
case REPEAT: {
SequenceMatcher sequence = getFixedRepeatMatcher(child);
// if the sequence consists entirely of single byte matches,
// just add them to the byte values to join.
if (sequence instanceof ByteArrayMatcher) {
addCollectedByteMatchers(singleByteSequence, sequences);
for (int position = 0; position < sequence.length(); position++) {
final byte value = sequence.getMatcherForPosition(position).getMatchingBytes()[0];
byteValuesToJoin.add(value);
}
} else if (sequence instanceof ByteMatcherArrayMatcher) {
addCollectedByteValues(byteValuesToJoin, sequences);
for (int position = 0; position < sequence.length(); position++) {
final ByteMatcher aMatcher = sequence.getMatcherForPosition(position);
singleByteSequence.add(aMatcher);
}
} else {
addCollectedByteValues(byteValuesToJoin, sequences);
addCollectedByteMatchers(singleByteSequence, sequences);
sequences.add(sequence);
}
break;
}
default: {
throwParseException(ast);
}
}
}
// Add any remaining bytes or byte matchers to the sequences.
// There cannot be both bytes and byte matchers
// outstanding to be collected, as they both ensure this as they are built,
// so the order of adding them here does not matter.
addCollectedByteValues(byteValuesToJoin, sequences);
addCollectedByteMatchers(singleByteSequence, sequences);
// If we only have a single sequence matcher, just return that
// otherwise, build a sequence array matcher from our list of sequences.
// of different sequence matchers:
matcher = sequences.size() == 1
? sequences.get(0)
: new SequenceArrayMatcher(sequences);
break;
}
// Deal with sequences consisting of a single value,
// where there is not a parent Sequence node.
case BYTE: {
matcher = AstCompilerUtils.createByteMatcher(ast, NOT_INVERTED);
// new OneByteMatcher(ast.getByteValue());
break;
}
case ALL_BITMASK: {
matcher = AstCompilerUtils.createAllBitmaskMatcher(ast, NOT_INVERTED);
//getAllBitmaskMatcher(ast);
break;
}
case ANY_BITMASK: {
matcher = AstCompilerUtils.createAnyBitmaskMatcher(ast, NOT_INVERTED);
//getAnyBitmaskMatcher(ast);
break;
}
case SET: {
matcher = AstCompilerUtils.createMatcherFromSet(ast, NOT_INVERTED, matcherFactory);
//getSetMatcher(ast, false);
break;
}
case ANY: {
matcher = AstCompilerUtils.getAnyMatcher(ast, NOT_INVERTED);
//createAnyByteMatcher.ANY_BYTE_MATCHER;
break;
}
case CASE_SENSITIVE_STRING: {
matcher = new ByteArrayMatcher(ast.getTextValue());
break;
}
case CASE_INSENSITIVE_STRING: {
matcher = new CaseInsensitiveSequenceMatcher(ast.getTextValue());
break;
}
case REPEAT: {
matcher = getFixedRepeatMatcher(ast);
break;
}
default: {
throwParseException(ast);
}
}
return matcher;
}
private void addCollectedByteValues(final List<Byte> byteValuesToJoin, final List<SequenceMatcher> sequences) {
if (byteValuesToJoin.size() > 0) {
final ByteArrayMatcher byteMatcher = new ByteArrayMatcher(byteValuesToJoin);
sequences.add(byteMatcher);
byteValuesToJoin.clear();
}
}
private void addCollectedByteMatchers(final List<ByteMatcher> matchers, final List<SequenceMatcher> sequences) {
if (matchers.size() == 1) {
sequences.add(matchers.get(0));
} else if (matchers.size() > 0) {
final ByteMatcherArrayMatcher matcher = new ByteMatcherArrayMatcher(matchers);
sequences.add(matcher);
}
matchers.clear();
}
private SequenceMatcher getCaseInsensitiveStringMatcher(final ParseTree ast) throws ParseException {
return new CaseInsensitiveSequenceMatcher(ast.getTextValue());
}
private ByteMatcher getAllBitmaskMatcher(final ParseTree ast) throws ParseException {
return new AllBitmaskMatcher(ast.getByteValue());
}
private ByteMatcher getAnyBitmaskMatcher(final ParseTree ast) throws ParseException {
return new AnyBitmaskMatcher(ast.getByteValue());
}
private ByteMatcher getSetMatcher(final ParseTree ast, final boolean inverted) throws ParseException {
//TODO: do we need inverted now the inverted set is already inverting the bytes it matches?
return matcherFactory.create(ast.getByteSetValue(), inverted);
}
private SequenceMatcher getFixedRepeatMatcher(final ParseTree ast) throws ParseException {
final List<ParseTree> repeatChildren = ast.getChildren();
final int minRepeat = repeatChildren.get(0).getIntValue();
final int maxRepeat = repeatChildren.get(1).getIntValue();
if (minRepeat == maxRepeat) {
final ParseTree repeatedNode = repeatChildren.get(2);
SequenceMatcher matcher = null;
switch (repeatedNode.getParseTreeType()) {
case ANY: {
matcher = maxRepeat == 1? AnyByteMatcher.ANY_BYTE_MATCHER
: new FixedGapMatcher(maxRepeat);
break;
}
case BYTE: {
matcher = maxRepeat == 1? new OneByteMatcher(repeatedNode.getByteValue())
: new ByteArrayMatcher(repeatedNode.getByteValue(), maxRepeat);
break;
}
case SET: {
final ByteMatcher set = getSetMatcher(repeatedNode, false);
matcher = maxRepeat == 1? set : new ByteMatcherArrayMatcher(set, maxRepeat);
break;
}
case INVERTED_SET: {
final ByteMatcher set = getSetMatcher(repeatedNode, true);
matcher = maxRepeat == 1? set : new ByteMatcherArrayMatcher(set, maxRepeat);
break;
}
case ANY_BITMASK: {
final ByteMatcher anyB = getAnyBitmaskMatcher(repeatedNode);
matcher = maxRepeat == 1? anyB : new ByteMatcherArrayMatcher(anyB, maxRepeat);
break;
}
case ALL_BITMASK: {
final ByteMatcher allB = getAllBitmaskMatcher(repeatedNode);
matcher = maxRepeat == 1? allB : new ByteMatcherArrayMatcher(allB, maxRepeat);
break;
}
case CASE_SENSITIVE_STRING: {
matcher = new ByteArrayMatcher(repeatString(repeatedNode.getTextValue(), maxRepeat));
break;
}
case CASE_INSENSITIVE_STRING: {
matcher = new CaseInsensitiveSequenceMatcher(repeatedNode.getTextValue(), maxRepeat);
break;
}
case SEQUENCE: {
matcher = maxRepeat == 1? doCompile(repeatedNode)
: doCompile(repeatedNode).repeat(maxRepeat);
break;
}
default: {
throwParseException(repeatedNode);
}
}
return matcher;
}
throw new ParseException("Sequences can only contain repeats of a fixed length {n}");
}
private String repeatString(final String stringToRepeat, final int numberToRepeat) {
if (numberToRepeat == 1) {
return stringToRepeat;
}
final StringBuilder builder = new StringBuilder(stringToRepeat.length() * numberToRepeat);
for (int count = 0; count < numberToRepeat; count++) {
builder.append(stringToRepeat);
}
return builder.toString();
}
/**
* @param ast
* @throws ParseException
*/
/*
private void throwParseException(ParseTree ast) throws ParseException {
final ParseTreeType type = ast.getParseTreeType();
final String message = String.format("Unknown type, id %d with description: %s",
type, type.getDescription());
throw new ParseException(message);
}
*/
}
| parameter renaming hopefully for clarity.
| src/net/domesdaybook/compiler/matcher/SequenceMatcherCompiler.java | parameter renaming hopefully for clarity. |
|
Java | bsd-3-clause | 0fe6925b961fd6b7fed6efd724fa4bc95b5648fb | 0 | msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,dhis2/dhis2-core,hispindia/dhis2-Core,dhis2/dhis2-core,hispindia/dhis2-Core,hispindia/dhis2-Core,dhis2/dhis2-core,hispindia/dhis2-Core,msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,dhis2/dhis2-core,dhis2/dhis2-core,hispindia/dhis2-Core | /*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.webapi.controller;
import java.io.IOException;
import java.io.Writer;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.common.DhisApiVersion;
import org.hisp.dhis.common.cache.CacheStrategy;
import org.hisp.dhis.dxf2.webmessage.WebMessageUtils;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.webapi.mvc.annotation.ApiVersion;
import org.hisp.dhis.webapi.service.WebMessageService;
import org.hisp.dhis.webapi.utils.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;
/**
* @author Lars Helge Overland
*/
@Controller
@RequestMapping( value = FileController.RESOURCE_PATH )
@ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } )
public class FileController
{
    public static final String RESOURCE_PATH = "/files";

    @Autowired
    private SystemSettingManager systemSettingManager;

    @Autowired
    private ContextUtils contextUtils;

    @Autowired
    private WebMessageService webMessageService;

    // -------------------------------------------------------------------------
    // Custom script
    // -------------------------------------------------------------------------

    /**
     * Serves the custom JavaScript stored in system settings, cacheable for
     * two weeks.
     */
    @RequestMapping( value = "/script", method = RequestMethod.GET )
    public void getCustomScript( HttpServletResponse response, Writer writer )
        throws IOException
    {
        serveContent( response, writer, ContextUtils.CONTENT_TYPE_JAVASCRIPT, SettingKey.CUSTOM_JS );
    }

    /**
     * Saves the posted content as the custom JavaScript system setting.
     */
    @RequestMapping( value = "/script", method = RequestMethod.POST, consumes = "application/javascript" )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    public void postCustomScript( @RequestBody String content, HttpServletResponse response,
        HttpServletRequest request )
    {
        saveContent( SettingKey.CUSTOM_JS, content, "Custom script created", response, request );
    }

    /**
     * Removes the custom JavaScript system setting.
     */
    @RequestMapping( value = "/script", method = RequestMethod.DELETE )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    @ResponseStatus( HttpStatus.NO_CONTENT )
    public void removeCustomScript( HttpServletResponse response )
    {
        systemSettingManager.deleteSystemSetting( SettingKey.CUSTOM_JS );
    }

    // -------------------------------------------------------------------------
    // Custom style
    // -------------------------------------------------------------------------

    /**
     * Serves the custom CSS stored in system settings, cacheable for two
     * weeks. The style/external mapping enables style to be reached from the
     * login page / before authentication.
     */
    @RequestMapping( value = { "/style", "/style/external" }, method = RequestMethod.GET )
    public void getCustomStyle( HttpServletResponse response, Writer writer )
        throws IOException
    {
        serveContent( response, writer, ContextUtils.CONTENT_TYPE_CSS, SettingKey.CUSTOM_CSS );
    }

    /**
     * Saves the posted content as the custom CSS system setting.
     */
    @RequestMapping( value = "/style", method = RequestMethod.POST, consumes = "text/css" )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    public void postCustomStyle( @RequestBody String content, HttpServletResponse response, HttpServletRequest request )
    {
        saveContent( SettingKey.CUSTOM_CSS, content, "Custom style created", response, request );
    }

    /**
     * Removes the custom CSS system setting.
     */
    @RequestMapping( value = "/style", method = RequestMethod.DELETE )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    @ResponseStatus( HttpStatus.NO_CONTENT )
    public void removeCustomStyle( HttpServletResponse response )
    {
        systemSettingManager.deleteSystemSetting( SettingKey.CUSTOM_CSS );
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    /**
     * Writes the value of the given setting to the response with the given
     * content type and a two-week cache header. A null setting value is
     * skipped so the writer is never handed null.
     */
    private void serveContent( HttpServletResponse response, Writer writer, String contentType, SettingKey key )
        throws IOException
    {
        contextUtils.configureResponse( response, contentType, CacheStrategy.CACHE_TWO_WEEKS );

        String content = (String) systemSettingManager.getSystemSetting( key, StringUtils.EMPTY );

        if ( content != null )
        {
            writer.write( content );
        }
    }

    /**
     * Saves non-null content under the given setting key and sends an OK web
     * message with the given text. Null content is ignored, as before.
     */
    private void saveContent( SettingKey key, String content, String message, HttpServletResponse response,
        HttpServletRequest request )
    {
        if ( content != null )
        {
            systemSettingManager.saveSystemSetting( key, content );
            webMessageService.send( WebMessageUtils.ok( message ), response, request );
        }
    }
}
| dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/FileController.java | /*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.webapi.controller;
import java.io.IOException;
import java.io.Writer;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.common.DhisApiVersion;
import org.hisp.dhis.common.cache.CacheStrategy;
import org.hisp.dhis.dxf2.webmessage.WebMessageUtils;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.webapi.mvc.annotation.ApiVersion;
import org.hisp.dhis.webapi.service.WebMessageService;
import org.hisp.dhis.webapi.utils.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;
/**
* @author Lars Helge Overland
*/
@Controller
@RequestMapping( value = FileController.RESOURCE_PATH )
@ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } )
public class FileController
{
    public static final String RESOURCE_PATH = "/files";

    @Autowired
    private SystemSettingManager systemSettingManager;

    @Autowired
    private ContextUtils contextUtils;

    @Autowired
    private WebMessageService webMessageService;

    // -------------------------------------------------------------------------
    // Custom script
    // -------------------------------------------------------------------------

    /**
     * Serves the custom JavaScript stored in system settings, cacheable for
     * two weeks.
     */
    @RequestMapping( value = "/script", method = RequestMethod.GET )
    public void getCustomScript( HttpServletResponse response, Writer writer )
        throws IOException
    {
        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_JAVASCRIPT, CacheStrategy.CACHE_TWO_WEEKS );

        String content = (String) systemSettingManager.getSystemSetting( SettingKey.CUSTOM_JS, StringUtils.EMPTY );

        // Guard against a null setting value to prevent a NullPointerException
        // from Writer.write; the default should make this non-null, but a
        // misbehaving setting store must not break the endpoint.
        if ( content != null )
        {
            writer.write( content );
        }
    }

    /**
     * Saves the posted content as the custom JavaScript system setting.
     */
    @RequestMapping( value = "/script", method = RequestMethod.POST, consumes = "application/javascript" )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    public void postCustomScript( @RequestBody String content, HttpServletResponse response,
        HttpServletRequest request )
    {
        if ( content != null )
        {
            systemSettingManager.saveSystemSetting( SettingKey.CUSTOM_JS, content );

            webMessageService.send( WebMessageUtils.ok( "Custom script created" ), response, request );
        }
    }

    /**
     * Removes the custom JavaScript system setting.
     */
    @RequestMapping( value = "/script", method = RequestMethod.DELETE )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    @ResponseStatus( HttpStatus.NO_CONTENT )
    public void removeCustomScript( HttpServletResponse response )
    {
        systemSettingManager.deleteSystemSetting( SettingKey.CUSTOM_JS );
    }

    // -------------------------------------------------------------------------
    // Custom style
    // -------------------------------------------------------------------------

    /**
     * Serves the custom CSS stored in system settings, cacheable for two
     * weeks. The style/external mapping enables style to be reached from the
     * login page / before authentication.
     */
    @RequestMapping( value = { "/style", "/style/external" }, method = RequestMethod.GET )
    public void getCustomStyle( HttpServletResponse response, Writer writer )
        throws IOException
    {
        contextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_CSS, CacheStrategy.CACHE_TWO_WEEKS );

        String content = (String) systemSettingManager.getSystemSetting( SettingKey.CUSTOM_CSS, StringUtils.EMPTY );

        // Same null guard as for the custom script endpoint.
        if ( content != null )
        {
            writer.write( content );
        }
    }

    /**
     * Saves the posted content as the custom CSS system setting.
     */
    @RequestMapping( value = "/style", method = RequestMethod.POST, consumes = "text/css" )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    public void postCustomStyle( @RequestBody String content, HttpServletResponse response, HttpServletRequest request )
    {
        if ( content != null )
        {
            systemSettingManager.saveSystemSetting( SettingKey.CUSTOM_CSS, content );

            webMessageService.send( WebMessageUtils.ok( "Custom style created" ), response, request );
        }
    }

    /**
     * Removes the custom CSS system setting.
     */
    @RequestMapping( value = "/style", method = RequestMethod.DELETE )
    @PreAuthorize( "hasRole('ALL') or hasRole('F_INSERT_CUSTOM_JS_CSS')" )
    @ResponseStatus( HttpStatus.NO_CONTENT )
    public void removeCustomStyle( HttpServletResponse response )
    {
        systemSettingManager.deleteSystemSetting( SettingKey.CUSTOM_CSS );
    }
}
| chore: Prevent NP on get custom script and stylesheet (#7762)
Signed-off-by: Morten Svanaes <[email protected]> | dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/FileController.java | chore: Prevent NP on get custom script and stylesheet (#7762) |
|
Java | bsd-3-clause | 8a9b6815a18596612a23a0c521a4a920a8a6a4ad | 0 | edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon | /*
* $Id: NaturePublishingGroupArticleIteratorFactory.java,v 1.8 2010-12-11 08:14:44 thib_gc Exp $
*/
/*
Copyright (c) 2000-2010 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.nature;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import org.lockss.util.*;
import org.lockss.plugin.*;
import org.lockss.extractor.*;
import org.lockss.daemon.PluginException;
/**
 * Article iterator and metadata extractor factory for Nature Publishing
 * Group journals.
 * <p>
 * The Nature URL structure means that the HTML for an article is at a URL
 * like http://www.nature.com/gt/journal/v16/n5/full/gt200929a.html, i.e.
 * &lt;base_url&gt;/&lt;journal_id&gt;/journal/v&lt;volume&gt; is the subtree
 * of interest.
 */
public class NaturePublishingGroupArticleIteratorFactory
    implements ArticleIteratorFactory,
               ArticleMetadataExtractorFactory {

  protected static Logger log = Logger.getLogger("NaturePublishingGroupArticleIteratorFactory");

  /** Root of the crawl subtree that contains one volume's articles. */
  protected static final String ROOT_TEMPLATE = "\"%s%s/journal/v%s/\", base_url, journal_id, volume_name";

  /** Matches full-text HTML and PDF article URLs within the subtree. */
  protected static final String PATTERN_TEMPLATE = "\"^%s%s/journal/v[^/]+/n[^/]+/(full/[^/]+\\.html|pdf/[^/]+\\.pdf)$\", base_url, journal_id, volume_name";

  /**
   * Builds an article iterator restricted to the volume subtree.
   *
   * @param au the archival unit to iterate over.
   * @param target the metadata target for the iteration.
   * @throws PluginException declared by the interface contract.
   */
  public Iterator<ArticleFiles> createArticleIterator(ArchivalUnit au,
                                                      MetadataTarget target)
      throws PluginException {
    return new NaturePublishingGroupArticleIterator(au, new SubTreeArticleIterator.Spec()
                                                        .setTarget(target)
                                                        .setRootTemplate(ROOT_TEMPLATE)
                                                        .setPatternTemplate(PATTERN_TEMPLATE));
  }

  /**
   * Returns the metadata extractor for this plugin; the target argument is
   * not currently used to specialize the extractor.
   */
  public ArticleMetadataExtractor createArticleMetadataExtractor(MetadataTarget target)
      throws PluginException {
    return new NatureArticleMetadataExtractor();
  }

  /**
   * Iterator that emits one ArticleFiles per article, anchored on the
   * full-text HTML when present and otherwise on the PDF.
   */
  protected static class NaturePublishingGroupArticleIterator
      extends SubTreeArticleIterator {

    /** Captures the article file name from a full-text HTML URL. */
    protected static Pattern HTML_PATTERN = Pattern.compile("/full/([^/]+)\\.html$", Pattern.CASE_INSENSITIVE);

    /** Captures the article file name from a full-text PDF URL. */
    protected static Pattern PDF_PATTERN = Pattern.compile("/pdf/([^/]+)\\.pdf$", Pattern.CASE_INSENSITIVE);

    protected NaturePublishingGroupArticleIterator(ArchivalUnit au,
                                                   SubTreeArticleIterator.Spec spec) {
      super(au, spec);
    }

    @Override
    protected ArticleFiles createArticleFiles(CachedUrl cu) {
      String url = cu.getUrl();
      Matcher mat;
      mat = HTML_PATTERN.matcher(url);
      if (mat.find()) {
        if ("index".equalsIgnoreCase(mat.group(1))) {
          return null; // HTTP 404 served as HTTP 200
        }
        return processFullTextHtml(cu, mat);
      }
      mat = PDF_PATTERN.matcher(url);
      if (mat.find()) {
        return processFullTextPdf(cu, mat);
      }
      log.warning("Mismatch between article iterator factory and article iterator: " + url);
      return null;
    }

    /**
     * Emits an article anchored on its full-text HTML, adding the companion
     * PDF role when that URL has been collected.
     */
    protected ArticleFiles processFullTextHtml(CachedUrl htmlCu, Matcher htmlMat) {
      ArticleFiles af = new ArticleFiles();
      af.setFullTextCu(htmlCu);
      af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_HTML, htmlCu);
      guessFullTextPdf(af, htmlMat);
      return af;
    }

    /**
     * Emits an article anchored on its PDF, but only when the corresponding
     * HTML URL has no content; otherwise the HTML URL emits the article and
     * this PDF is only attached as a role, avoiding duplicates.
     */
    protected ArticleFiles processFullTextPdf(CachedUrl pdfCu, Matcher pdfMat) {
      CachedUrl htmlCu = au.makeCachedUrl(pdfMat.replaceFirst("/full/$1.html"));
      if (htmlCu != null && htmlCu.hasContent()) {
        return null;
      }
      ArticleFiles af = new ArticleFiles();
      af.setFullTextCu(pdfCu);
      af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_PDF, pdfCu);
      return af;
    }

    /** Adds the full-text PDF role when the derived PDF URL has content. */
    protected void guessFullTextPdf(ArticleFiles af, Matcher mat) {
      CachedUrl pdfCu = au.makeCachedUrl(mat.replaceFirst("/pdf/$1.pdf"));
      if (pdfCu != null && pdfCu.hasContent()) {
        af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_PDF, pdfCu);
      }
    }

  }

  /**
   * Extracts metadata from the ARTICLE_METADATA role when available, and
   * always records the full-text URL as the access URL.
   * <p>
   * Declared static: it uses no state from the enclosing factory, so it
   * should not carry a hidden reference to an enclosing instance.
   */
  public static class NatureArticleMetadataExtractor implements ArticleMetadataExtractor {

    public ArticleMetadata extract(ArticleFiles af)
        throws IOException, PluginException {
      ArticleMetadata am = null;
      CachedUrl cu = af.getRoleCu(ArticleFiles.ROLE_ARTICLE_METADATA);
      if (cu != null) {
        FileMetadataExtractor me = cu.getFileMetadataExtractor();
        if (me != null) {
          am = me.extract(cu);
        }
      }
      if (am == null || am.size() == 0) {
        am = new ArticleMetadata();
      }
      am.put(ArticleMetadata.KEY_ACCESS_URL, af.getFullTextUrl());
      return am;
    }

  }

}
| plugins/src/org/lockss/plugin/nature/NaturePublishingGroupArticleIteratorFactory.java | /*
* $Id: NaturePublishingGroupArticleIteratorFactory.java,v 1.7 2010-12-11 00:59:15 thib_gc Exp $
*/
/*
Copyright (c) 2000-2010 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.nature;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import org.lockss.util.*;
import org.lockss.plugin.*;
import org.lockss.extractor.*;
import org.lockss.daemon.PluginException;
public class NaturePublishingGroupArticleIteratorFactory
    implements ArticleIteratorFactory,
               ArticleMetadataExtractorFactory {

  protected static Logger log =
      Logger.getLogger("NaturePublishingGroupArticleIteratorFactory");

  /*
   * The Nature URL structure means that the HTML for an article is
   * at a URL like http://www.nature.com/gt/journal/v16/n5/full/gt200929a.html
   * ie <base_url>/<journal_id>/journal/v<volume> is the subtree we want.
   */
  protected static final String ROOT_TEMPLATE =
      "\"%s%s/journal/v%s/\", base_url, journal_id, volume_name";

  protected static final String PATTERN_TEMPLATE =
      "\"^%s%s/journal/v[^/]+/n[^/]+/(full/[^/]+\\.html|pdf/[^/]+\\.pdf)$\", base_url, journal_id, volume_name";

  /**
   * Returns an iterator over the article files of the given AU, rooted at
   * the journal volume subtree described by ROOT_TEMPLATE.
   */
  public Iterator<ArticleFiles> createArticleIterator(ArchivalUnit au,
                                                      MetadataTarget target)
      throws PluginException {
    return new NaturePublishingGroupArticleIterator(au,
        new SubTreeArticleIterator.Spec()
            .setTarget(target)
            .setRootTemplate(ROOT_TEMPLATE)
            .setPatternTemplate(PATTERN_TEMPLATE));
  }

  public ArticleMetadataExtractor createArticleMetadataExtractor(MetadataTarget target)
      throws PluginException {
    return new NatureArticleMetadataExtractor();
  }

  protected static class NaturePublishingGroupArticleIterator
      extends SubTreeArticleIterator {

    // final: shared pattern constants; must never be reassigned
    protected static final Pattern HTML_PATTERN =
        Pattern.compile("/full/([^/]+)\\.html$", Pattern.CASE_INSENSITIVE);

    protected static final Pattern PDF_PATTERN =
        Pattern.compile("/pdf/([^/]+)\\.pdf$", Pattern.CASE_INSENSITIVE);

    protected NaturePublishingGroupArticleIterator(ArchivalUnit au,
                                                   SubTreeArticleIterator.Spec spec) {
      super(au, spec);
    }

    /**
     * Builds an ArticleFiles for the cached URL, preferring the full-text
     * HTML rendition over the PDF so each article is emitted only once.
     */
    @Override
    protected ArticleFiles createArticleFiles(CachedUrl cu) {
      String url = cu.getUrl();
      Matcher mat;

      mat = HTML_PATTERN.matcher(url);
      if (mat.find()) {
        if ("index".equalsIgnoreCase(mat.group(1))) {
          return null; // HTTP 404 served as HTTP 200
        }
        return processFullTextHtml(cu, mat);
      }

      mat = PDF_PATTERN.matcher(url);
      if (mat.find()) {
        return processFullTextPdf(cu, mat);
      }

      // The PATTERN_TEMPLATE should only feed us HTML/PDF article URLs.
      log.warning("Mismatch between article iterator factory and article iterator: " + url);
      return null;
    }

    /** Emits an ArticleFiles whose full text is the HTML rendition. */
    protected ArticleFiles processFullTextHtml(CachedUrl htmlCu, Matcher htmlMat) {
      ArticleFiles af = new ArticleFiles();
      af.setFullTextCu(htmlCu);
      af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_HTML, htmlCu);
      return af;
    }

    /**
     * Emits the PDF rendition only when no corresponding full-text HTML is
     * present, so the same article is not reported twice.
     */
    protected ArticleFiles processFullTextPdf(CachedUrl pdfCu, Matcher pdfMat) {
      // "$1" re-inserts the article id captured by PDF_PATTERN.
      CachedUrl htmlCu = au.makeCachedUrl(pdfMat.replaceFirst("/full/$1.html"));
      if (htmlCu != null && htmlCu.hasContent()) {
        return null; // the HTML counterpart is emitted instead
      }
      ArticleFiles af = new ArticleFiles();
      af.setFullTextCu(pdfCu);
      af.setRoleCu(ArticleFiles.ROLE_FULL_TEXT_PDF, pdfCu);
      return af;
    }
  }

  /**
   * Metadata extractor that delegates to the file metadata extractor of the
   * ROLE_ARTICLE_METADATA url (when one exists) and always records the
   * access URL. Declared static: it uses no state of the enclosing factory.
   */
  public static class NatureArticleMetadataExtractor implements ArticleMetadataExtractor {

    public ArticleMetadata extract(ArticleFiles af)
        throws IOException, PluginException {
      ArticleMetadata am = null;
      CachedUrl cu = af.getRoleCu(ArticleFiles.ROLE_ARTICLE_METADATA);
      if (cu != null) {
        FileMetadataExtractor me = cu.getFileMetadataExtractor();
        if (me != null) {
          am = me.extract(cu);
        }
      }
      if (am == null || am.size() == 0) {
        // Fall back to an empty record so the access url is still reported.
        am = new ArticleMetadata();
      }
      am.put(ArticleMetadata.KEY_ACCESS_URL, af.getFullTextUrl());
      return am;
    }
  }
}
| Satisfy testing dependency.
git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@10461 4f837ed2-42f5-46e7-a7a5-fa17313484d4
| plugins/src/org/lockss/plugin/nature/NaturePublishingGroupArticleIteratorFactory.java | Satisfy testing dependency. |
|
Java | bsd-3-clause | dc2ec8d90ddb8f41c626da98da461ba8363c8f5d | 0 | edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon | /*
* $Id: GeorgThiemeVerlagHtmlFilterFactory.java,v 1.2 2013-11-23 01:45:22 etenbrink Exp $
*/
/*
Copyright (c) 2000-2013 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.georgthiemeverlag;
import java.io.*;
import org.htmlparser.*;
import org.htmlparser.filters.*;
import org.lockss.daemon.PluginException;
import org.lockss.filter.FilterUtil;
import org.lockss.filter.HtmlTagFilter;
import org.lockss.filter.HtmlTagFilter.TagPair;
import org.lockss.filter.WhiteSpaceFilter;
import org.lockss.filter.html.*;
import org.lockss.plugin.*;
import org.lockss.util.ListUtil;
import org.lockss.util.Logger;
import org.lockss.util.ReaderInputStream;
public class GeorgThiemeVerlagHtmlFilterFactory implements FilterFactory {

  Logger log = Logger.getLogger(GeorgThiemeVerlagHtmlFilterFactory.class);

  /**
   * Builds the hash filter chain for Georg Thieme Verlag pages: an
   * HtmlParser pass that drops scripts, ads and navigation chrome, then a
   * tag-pair pass for header/footer and comments, then whitespace collapse.
   */
  @Override
  public InputStream createFilteredInputStream(ArchivalUnit au,
                                               InputStream in,
                                               String encoding)
      throws PluginException {
    // Nodes excluded from the hash by the HtmlParser pass
    NodeFilter[] excludeNodes = new NodeFilter[] {
        // Hash filter
        new TagNameFilter("script"),
        // Remove header/footer items
        // XXX remove 2 comments below when new HtmlFilterInputStream changes go into effect
        // new TagNameFilter("header"),
        // new TagNameFilter("footer"),
        // Contains ads
        HtmlNodeFilters.tagWithAttributeRegex("div", "id", "adSidebar[^\"]*"),
        // Contains navigation items
        HtmlNodeFilters.tagWithAttribute("div", "id", "navPanel"),
        // Contains functional links, not content
        HtmlNodeFilters.tagWithAttribute("div", "class", "pageFunctions"),
        HtmlNodeFilters.tagWithAttribute("div", "class", "articleFunctions"),
        HtmlNodeFilters.tagWithAttribute("span", "class", "articleCategories")
    };
    HtmlNodeFilterTransform transform =
        HtmlNodeFilterTransform.exclude(new OrFilter(excludeNodes));
    InputStream htmlFiltered = new HtmlFilterInputStream(in, encoding, transform);
    Reader htmlReader = FilterUtil.getReader(htmlFiltered, encoding);
    Reader tagFiltered = HtmlTagFilter.makeNestedFilter(htmlReader,
        ListUtil.list(
            // XXX remove this and next 3 lines when new HtmlFilterInputStream changes go into effect
            new TagPair("<header id=\"pageHeader\">", "</header>"),
            new TagPair("<footer>", "</footer>"),
            // XXX
            new TagPair("<!--[", "]-->"),
            new TagPair("<!-- ", " -->")));
    return new ReaderInputStream(new WhiteSpaceFilter(tagFiltered));
  }
}
| plugins/src/org/lockss/plugin/georgthiemeverlag/GeorgThiemeVerlagHtmlFilterFactory.java | /*
* $Id: GeorgThiemeVerlagHtmlFilterFactory.java,v 1.1 2013-11-12 22:06:51 etenbrink Exp $
*/
/*
Copyright (c) 2000-2013 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.georgthiemeverlag;
import java.io.*;
import org.htmlparser.*;
import org.htmlparser.filters.*;
import org.lockss.daemon.PluginException;
import org.lockss.filter.FilterUtil;
import org.lockss.filter.HtmlTagFilter;
import org.lockss.filter.HtmlTagFilter.TagPair;
import org.lockss.filter.WhiteSpaceFilter;
import org.lockss.filter.html.*;
import org.lockss.plugin.*;
import org.lockss.util.ListUtil;
import org.lockss.util.Logger;
import org.lockss.util.ReaderInputStream;
public class GeorgThiemeVerlagHtmlFilterFactory implements FilterFactory {

  Logger log = Logger.getLogger(GeorgThiemeVerlagHtmlFilterFactory.class);

  /**
   * Builds the hash filter chain for Georg Thieme Verlag pages: an
   * HtmlParser pass that removes scripts, header bars, ads and navigation,
   * then a tag-pair pass for header/footer and comments, then whitespace
   * normalization.
   */
  @Override
  public InputStream createFilteredInputStream(ArchivalUnit au,
                                               InputStream in,
                                               String encoding)
      throws PluginException {
    // Nodes excluded from the hash by the HtmlParser pass
    NodeFilter[] excludeNodes = new NodeFilter[] {
        // Hash filter
        new TagNameFilter("script"),
        // Header items
        HtmlNodeFilters.tagWithAttribute("div", "id", "institutionName"),
        HtmlNodeFilters.tagWithAttributeRegex("div", "id", "[^\"]+HeaderBar"),
        // Contains ads
        HtmlNodeFilters.tagWithAttributeRegex("div", "id", "adSidebar[^\"]*"),
        // Contains navigation items
        HtmlNodeFilters.tagWithAttribute("div", "id", "navPanel"),
        // Contains functional links, not content
        HtmlNodeFilters.tagWithAttribute("div", "class", "pageFunctions"),
        HtmlNodeFilters.tagWithAttribute("div", "class", "articleFunctions"),
        HtmlNodeFilters.tagWithAttribute("span", "class", "articleCategories")
    };
    InputStream htmlFiltered = new HtmlFilterInputStream(in, encoding,
        HtmlNodeFilterTransform.exclude(new OrFilter(excludeNodes)));
    Reader htmlReader = FilterUtil.getReader(htmlFiltered, encoding);
    Reader tagFiltered = HtmlTagFilter.makeNestedFilter(htmlReader,
        ListUtil.list(
            new TagPair("<header id=\"pageHeader\">", "</header>"),
            new TagPair("<footer>", "</footer>"),
            new TagPair("<!--[", "]-->"),
            new TagPair("<!-- ", " -->")));
    return new ReaderInputStream(new WhiteSpaceFilter(tagFiltered));
  }
}
| Add comments to remove comments and code when HtmlFilterInputStream changes are in production
git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@31257 4f837ed2-42f5-46e7-a7a5-fa17313484d4
| plugins/src/org/lockss/plugin/georgthiemeverlag/GeorgThiemeVerlagHtmlFilterFactory.java | Add comments to remove comments and code when HtmlFilterInputStream changes are in production |
|
Java | mit | 11eda69af9ba0506fdc3815be1b88115658d8590 | 0 | Herve-M/UQAM-EMB7015 | package apdugenerator;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.*;
import java.security.spec.*;
import java.security.interfaces.*;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.smartcardio.*;
/**
*
* @author Hervé
*/
public class APDUGenerator {

    // Class byte identifying APDUs addressed to our applet.
    static final byte CLA_APPLET = (byte) 0xB0;

    // AID of the target applet.
    static final byte[] APPLET_AID = {(byte) 0xC3, (byte) 0x5E,
            (byte) 0x4F, (byte) 0x14, (byte) 0x37, (byte) 0x6B};

    // Applet life-cycle states.
    static final byte STATE_INIT = 0;
    static final byte STATE_ISSUED = 1;

    //// INSTRUCTIONS
    // Accepted while the applet is still in the INIT state.
    static final byte INS_SET_PUBLIC_MODULUS = (byte) 0x01;
    static final byte INS_SET_PRIVATE_MODULUS = (byte) 0x02;
    static final byte INS_SET_PRIVATE_EXP = (byte) 0x03;
    static final byte INS_SET_PUBLIC_EXP = (byte) 0x04;
    static final byte INS_SET_OWNER_PIN = (byte) 0x05;
    static final byte INS_SET_ISSUED = (byte) 0x06;
    // Accepted once the applet has been ISSUED.
    static final byte INS_VERIFICATION = (byte) 0x10;
    static final byte INS_CREDIT = (byte) 0x20;
    static final byte INS_DEBIT = (byte) 0x30;
    static final byte INS_BALANCE = (byte) 0x40;

    /**
     * Generates a 512-bit RSA key pair and writes the encoded keys to
     * "public.key" (X.509) and "private.key" (PKCS#8) in the working
     * directory, then prints the key components.
     */
    private void generateKeyPair() {
        try {
            System.out.println("Generating keys...");
            KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
            generator.initialize(512);
            KeyPair keypair = generator.generateKeyPair();
            RSAPublicKey publicKey = (RSAPublicKey) keypair.getPublic();
            RSAPrivateKey privateKey = (RSAPrivateKey) keypair.getPrivate();
            // try-with-resources closes the key files even if a write fails
            try (FileOutputStream publicKeyFile = new FileOutputStream("public.key")) {
                publicKeyFile.write(publicKey.getEncoded());
            }
            try (FileOutputStream privateKeyFile = new FileOutputStream("private.key")) {
                privateKeyFile.write(privateKey.getEncoded());
            }
            System.out.println("Modulus = " + publicKey.getModulus());
            System.out.println("Public Exp = " + publicKey.getPublicExponent());
            System.out.println("Private Exp = " + privateKey.getPrivateExponent());
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Loads the whole content of a file into a byte array.
     *
     * @param fileName path of the file to read
     * @return the file content, or {@code null} if the file could not be read
     */
    private byte[] loadFile(String fileName) {
        File file = new File(fileName);
        try (FileInputStream fileInputStream = new FileInputStream(file)) {
            int length = (int) file.length();
            System.out.println("Total file size to read (in bytes) : " + length);
            byte[] data = new byte[length];
            int offset = 0;
            // read() may return fewer bytes than requested: loop until full.
            while (offset < length) {
                int read = fileInputStream.read(data, offset, length - offset);
                if (read < 0) {
                    throw new IOException("Unexpected end of file: " + fileName);
                }
                offset += read;
            }
            return data;
        } catch (IOException e) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, e);
        }
        return null;
    }

    /**
     * Converts a BigInteger to its unsigned big-endian byte representation,
     * dropping the extra leading sign byte {@code toByteArray()} may prepend.
     *
     * @param big value to convert
     * @return unsigned magnitude bytes of the value
     */
    byte[] getBytes(BigInteger big) {
        byte[] data = big.toByteArray();
        if (data[0] == 0) {
            // Drop the leading 0x00 sign byte so only the magnitude remains.
            byte[] tmp = data;
            data = new byte[tmp.length - 1];
            System.arraycopy(tmp, 1, data, 0, tmp.length - 1);
        }
        return data;
    }

    /**
     * Renders a byte array as a readable hex string, e.g. "0x01 0xFF ".
     *
     * @param in bytes to render
     * @return space separated "0xNN" tokens
     */
    String byteToStr(byte[] in) {
        StringBuilder out = new StringBuilder();
        for (byte b : in) {
            out.append("0x").append(String.format("%02X ", b));
        }
        return out.toString();
    }

    /** Prints the APDUs that load the public key modulus and exponent. */
    public void setPublicKey() {
        try {
            byte[] data = loadFile("public.key");
            X509EncodedKeySpec spec = new X509EncodedKeySpec(data);
            KeyFactory factory = KeyFactory.getInstance("RSA");
            RSAPublicKey key = (RSAPublicKey) factory.generatePublic(spec);
            byte[] modulus = getBytes(key.getModulus());
            CommandAPDU capdu;
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PUBLIC_MODULUS, (byte) 0,
                    (byte) 0, modulus);
            System.out.println("APDU for setting Public Key Modulus :");
            System.out.println(byteToStr(capdu.getBytes()));
            byte[] exponent = getBytes(key.getPublicExponent());
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PUBLIC_EXP, (byte) 0,
                    (byte) 0, exponent);
            System.out.println("APDU for setting Public Key Exp :");
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Prints the APDUs that load the private key modulus and exponent. */
    public void setPrivateKey() {
        try {
            byte[] data = loadFile("private.key");
            PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(data);
            KeyFactory factory = KeyFactory.getInstance("RSA");
            RSAPrivateKey key = (RSAPrivateKey) factory.generatePrivate(spec);
            byte[] modulus = getBytes(key.getModulus());
            CommandAPDU capdu;
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PRIVATE_MODULUS, (byte) 0,
                    (byte) 0, modulus);
            System.out.println("APDU for setting Private Key Modulus :");
            System.out.println(byteToStr(capdu.getBytes()));
            byte[] exponent = getBytes(key.getPrivateExponent());
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PRIVATE_EXP, (byte) 0,
                    (byte) 0, exponent);
            System.out.println("APDU for setting Private Key Exp :");
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Reads a numeric value from stdin and prints an APDU carrying it as
     * command data for the given instruction. Shared by the PIN, credit and
     * debit commands, whose only differences are the prompt/label/INS.
     *
     * @param prompt text shown to the user
     * @param ins    instruction byte of the APDU
     * @param label  caption printed before the APDU bytes
     */
    private void printValueApdu(String prompt, byte ins, String label) {
        try {
            System.out.println(prompt);
            Scanner scanner = new Scanner(System.in);
            BigInteger choice = scanner.nextBigInteger();
            // The value travels in the APDU data field (not the Ne field),
            // so the applet actually receives it.
            CommandAPDU capdu = new CommandAPDU(CLA_APPLET, ins, (byte) 0,
                    (byte) 0, choice.toByteArray());
            System.out.println(label);
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Prints the APDU that sets the owner PIN (INIT state only). */
    public void setOwnerPin() {
        printValueApdu("Enter PIN :", INS_SET_OWNER_PIN,
                "APDU for setting Owner Pin :");
    }

    /** Prints the APDU that verifies the owner PIN. */
    public void verifyPin() {
        printValueApdu("Enter PIN :", INS_VERIFICATION,
                "APDU for Pin verification :");
    }

    /** Prints the APDU that credits the wallet. */
    public void getCreditApdu() {
        printValueApdu("Enter how many to credit :", INS_CREDIT,
                "APDU for Credit Op. :");
    }

    /** Prints the APDU that debits the wallet. */
    public void getDebitApdu() {
        printValueApdu("Enter how many to debit :", INS_DEBIT,
                "APDU for Debit Op. :");
    }

    /** Prints the APDU that queries the wallet balance (no command data). */
    public void getBalanceApdu() {
        try {
            CommandAPDU capdu = new CommandAPDU(CLA_APPLET, INS_BALANCE, (byte) 0,
                    (byte) 0, (byte) 0);
            System.out.println("APDU for Balance Op. :");
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Shows the interactive menu, reads the user's choice from stdin and
     * dispatches to the matching generator. Unknown options re-display the
     * menu; option 9 quits.
     */
    public void printMenu() {
        System.out.println("Select one option :");
        System.out.println("1 - Generate KeyPair");
        System.out.println("2 - Get Private Key APDU");
        System.out.println("3 - Get Public Key APDU");
        System.out.println("4 - Get PIN APDU");
        System.out.println("5 - Get PIN Verification APDU");
        System.out.println("6 - Get Credit APDU");
        System.out.println("7 - Get Debit APDU");
        System.out.println("8 - Get Balance APDU");
        System.out.println("9 - Quit");
        Scanner scanner = new Scanner(System.in);
        int choice = scanner.nextInt();
        switch (choice) {
            case 1:
                this.generateKeyPair();
                break;
            case 2:
                this.setPrivateKey();
                break;
            case 3:
                this.setPublicKey();
                break;
            case 4:
                this.setOwnerPin();
                break;
            case 5:
                this.verifyPin();
                break;
            case 6:
                this.getCreditApdu();
                break;
            case 7:
                this.getDebitApdu();
                break;
            case 8:
                this.getBalanceApdu();
                break;
            case 9:
                return; // quit
            default:
                // Unknown option: show the menu again.
                this.printMenu();
        }
    }

    /**
     * Entry point: shows the menu once.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        APDUGenerator apdug = new APDUGenerator();
        apdug.printMenu();
    }
}
| APDUGenerator/src/apdugenerator/APDUGenerator.java | package apdugenerator;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.*;
import java.security.spec.*;
import java.security.interfaces.*;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.smartcardio.*;
/**
*
* @author Hervé
*/
public class APDUGenerator {

    // Class byte identifying APDUs addressed to our applet.
    static final byte CLA_APPLET = (byte) 0xB0;

    // AID of the target applet.
    static final byte[] APPLET_AID = {(byte) 0xC3, (byte) 0x5E,
            (byte) 0x4F, (byte) 0x14, (byte) 0x37, (byte) 0x6B};

    // Applet life-cycle states.
    static final byte STATE_INIT = 0;
    static final byte STATE_ISSUED = 1;

    //// INSTRUCTIONS
    // Accepted while the applet is still in the INIT state.
    static final byte INS_SET_PUBLIC_MODULUS = (byte) 0x01;
    static final byte INS_SET_PRIVATE_MODULUS = (byte) 0x02;
    static final byte INS_SET_PRIVATE_EXP = (byte) 0x03;
    static final byte INS_SET_PUBLIC_EXP = (byte) 0x04;
    static final byte INS_SET_OWNER_PIN = (byte) 0x05;
    static final byte INS_SET_ISSUED = (byte) 0x06;
    // Accepted once the applet has been ISSUED.
    static final byte INS_VERIFICATION = (byte) 0x10;
    static final byte INS_CREDIT = (byte) 0x20;
    static final byte INS_DEBIT = (byte) 0x30;
    static final byte INS_BALANCE = (byte) 0x40;

    /**
     * Generates a 512-bit RSA key pair and writes the encoded keys to
     * "public.key" (X.509) and "private.key" (PKCS#8) in the working
     * directory, then prints the key components.
     */
    private void generateKeyPair() {
        try {
            System.out.println("Generating keys...");
            KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
            generator.initialize(512);
            KeyPair keypair = generator.generateKeyPair();
            RSAPublicKey publicKey = (RSAPublicKey) keypair.getPublic();
            RSAPrivateKey privateKey = (RSAPrivateKey) keypair.getPrivate();
            // try-with-resources closes the key files even if a write fails
            try (FileOutputStream publicKeyFile = new FileOutputStream("public.key")) {
                publicKeyFile.write(publicKey.getEncoded());
            }
            try (FileOutputStream privateKeyFile = new FileOutputStream("private.key")) {
                privateKeyFile.write(privateKey.getEncoded());
            }
            System.out.println("Modulus = " + publicKey.getModulus());
            System.out.println("Public Exp = " + publicKey.getPublicExponent());
            System.out.println("Private Exp = " + privateKey.getPrivateExponent());
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Loads the whole content of a file into a byte array.
     *
     * @param fileName path of the file to read
     * @return the file content, or {@code null} if the file could not be read
     */
    private byte[] loadFile(String fileName) {
        File file = new File(fileName);
        try (FileInputStream fileInputStream = new FileInputStream(file)) {
            int length = (int) file.length();
            System.out.println("Total file size to read (in bytes) : " + length);
            byte[] data = new byte[length];
            int offset = 0;
            // read() may return fewer bytes than requested: loop until full.
            while (offset < length) {
                int read = fileInputStream.read(data, offset, length - offset);
                if (read < 0) {
                    throw new IOException("Unexpected end of file: " + fileName);
                }
                offset += read;
            }
            return data;
        } catch (IOException e) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, e);
        }
        return null;
    }

    /**
     * Converts a BigInteger to its unsigned big-endian byte representation,
     * dropping the extra leading sign byte {@code toByteArray()} may prepend.
     *
     * @param big value to convert
     * @return unsigned magnitude bytes of the value
     */
    byte[] getBytes(BigInteger big) {
        byte[] data = big.toByteArray();
        if (data[0] == 0) {
            // Drop the leading 0x00 sign byte so only the magnitude remains.
            byte[] tmp = data;
            data = new byte[tmp.length - 1];
            System.arraycopy(tmp, 1, data, 0, tmp.length - 1);
        }
        return data;
    }

    /**
     * Renders a byte array as a readable hex string, e.g. "0x01 0xFF ".
     *
     * @param in bytes to render
     * @return space separated "0xNN" tokens
     */
    String byteToStr(byte[] in) {
        StringBuilder out = new StringBuilder();
        for (byte b : in) {
            out.append("0x").append(String.format("%02X ", b));
        }
        return out.toString();
    }

    /** Prints the APDUs that load the public key modulus and exponent. */
    public void setPublicKey() {
        try {
            byte[] data = loadFile("public.key");
            X509EncodedKeySpec spec = new X509EncodedKeySpec(data);
            KeyFactory factory = KeyFactory.getInstance("RSA");
            RSAPublicKey key = (RSAPublicKey) factory.generatePublic(spec);
            byte[] modulus = getBytes(key.getModulus());
            CommandAPDU capdu;
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PUBLIC_MODULUS, (byte) 0,
                    (byte) 0, modulus);
            System.out.println("APDU for setting Public Key Modulus :");
            System.out.println(byteToStr(capdu.getBytes()));
            byte[] exponent = getBytes(key.getPublicExponent());
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PUBLIC_EXP, (byte) 0,
                    (byte) 0, exponent);
            System.out.println("APDU for setting Public Key Exp :");
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Prints the APDUs that load the private key modulus and exponent. */
    public void setPrivateKey() {
        try {
            byte[] data = loadFile("private.key");
            PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(data);
            KeyFactory factory = KeyFactory.getInstance("RSA");
            RSAPrivateKey key = (RSAPrivateKey) factory.generatePrivate(spec);
            byte[] modulus = getBytes(key.getModulus());
            CommandAPDU capdu;
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PRIVATE_MODULUS, (byte) 0,
                    (byte) 0, modulus);
            System.out.println("APDU for setting Private Key Modulus :");
            System.out.println(byteToStr(capdu.getBytes()));
            byte[] exponent = getBytes(key.getPrivateExponent());
            capdu = new CommandAPDU(CLA_APPLET, INS_SET_PRIVATE_EXP, (byte) 0,
                    (byte) 0, exponent);
            System.out.println("APDU for setting Private Key Exp :");
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Reads a numeric value from stdin and prints an APDU carrying it as
     * command data for the given instruction.
     *
     * Fixes the previous behavior where the int value was passed as the
     * fifth argument of {@code CommandAPDU(int,int,int,int,int)}, which is
     * the expected-response-length (Ne) parameter — the PIN/amount never
     * reached the APDU data field at all.
     *
     * @param prompt text shown to the user
     * @param ins    instruction byte of the APDU
     * @param label  caption printed before the APDU bytes
     */
    private void printValueApdu(String prompt, byte ins, String label) {
        try {
            System.out.println(prompt);
            Scanner scanner = new Scanner(System.in);
            BigInteger choice = scanner.nextBigInteger();
            CommandAPDU capdu = new CommandAPDU(CLA_APPLET, ins, (byte) 0,
                    (byte) 0, choice.toByteArray());
            System.out.println(label);
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Prints the APDU that sets the owner PIN (INIT state only). */
    public void setOwnerPin() {
        printValueApdu("Enter PIN :", INS_SET_OWNER_PIN,
                "APDU for setting Owner Pin :");
    }

    /** Prints the APDU that verifies the owner PIN. */
    public void verifyPin() {
        printValueApdu("Enter PIN :", INS_VERIFICATION,
                "APDU for Pin verification :");
    }

    /** Prints the APDU that credits the wallet. */
    public void getCreditApdu() {
        printValueApdu("Enter how many to credit :", INS_CREDIT,
                "APDU for Credit Op. :");
    }

    /** Prints the APDU that debits the wallet. */
    public void getDebitApdu() {
        printValueApdu("Enter how many to debit :", INS_DEBIT,
                "APDU for Debit Op. :");
    }

    /** Prints the APDU that queries the wallet balance (no command data). */
    public void getBalanceApdu() {
        try {
            CommandAPDU capdu = new CommandAPDU(CLA_APPLET, INS_BALANCE, (byte) 0,
                    (byte) 0, (byte) 0);
            System.out.println("APDU for Balance Op. :");
            System.out.println(byteToStr(capdu.getBytes()));
        } catch (Exception ex) {
            Logger.getLogger(APDUGenerator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Shows the interactive menu, reads the user's choice from stdin and
     * dispatches to the matching generator. Unknown options re-display the
     * menu; option 9 quits.
     */
    public void printMenu() {
        System.out.println("Select one option :");
        System.out.println("1 - Generate KeyPair");
        System.out.println("2 - Get Private Key APDU");
        System.out.println("3 - Get Public Key APDU");
        System.out.println("4 - Get PIN APDU");
        System.out.println("5 - Get PIN Verification APDU");
        System.out.println("6 - Get Credit APDU");
        System.out.println("7 - Get Debit APDU");
        System.out.println("8 - Get Balance APDU");
        System.out.println("9 - Quit");
        Scanner scanner = new Scanner(System.in);
        int choice = scanner.nextInt();
        switch (choice) {
            case 1:
                this.generateKeyPair();
                break;
            case 2:
                this.setPrivateKey();
                break;
            case 3:
                this.setPublicKey();
                break;
            case 4:
                this.setOwnerPin();
                break;
            case 5:
                this.verifyPin();
                break;
            case 6:
                this.getCreditApdu();
                break;
            case 7:
                this.getDebitApdu();
                break;
            case 8:
                this.getBalanceApdu();
                break;
            case 9:
                return; // quit
            default:
                // Unknown option: show the menu again.
                this.printMenu();
        }
    }

    /**
     * Entry point: shows the menu once.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        APDUGenerator apdug = new APDUGenerator();
        apdug.printMenu();
    }
}
| Fix some bugs. | APDUGenerator/src/apdugenerator/APDUGenerator.java | Fix some bugs. |
|
Java | epl-1.0 | 7acf9e43416a0e3ad3aa2f1d633549422a101c23 | 0 | rohitmohan96/ceylon-ide-eclipse,rohitmohan96/ceylon-ide-eclipse | package com.redhat.ceylon.eclipse.code.refactor;
import static com.redhat.ceylon.eclipse.code.editor.Util.getCurrentEditor;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.jface.text.ITextSelection;
import org.eclipse.ltk.core.refactoring.PerformChangeOperation;
import org.eclipse.ltk.core.refactoring.TextFileChange;
import org.eclipse.text.edits.InsertEdit;
import org.eclipse.text.edits.MultiTextEdit;
import org.eclipse.text.edits.ReplaceEdit;
import org.eclipse.ui.IFileEditorInput;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.LocalModifier;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.eclipse.code.editor.CeylonEditor;
import com.redhat.ceylon.eclipse.code.quickfix.CeylonQuickFixAssistant;
public class RevealInferredTypeHandler extends AbstractHandler {
@Override
public boolean isEnabled() {
    // Enabled whenever the current editor contains at least one inferred
    // local type or untyped value iterator that can be made explicit.
    CeylonEditor ceylonEditor = (CeylonEditor) getCurrentEditor();
    List<Tree.LocalModifier> inferredTypes = new ArrayList<Tree.LocalModifier>();
    List<Tree.ValueIterator> untypedIterators = new ArrayList<Tree.ValueIterator>();
    findCandidatesForRevelation(ceylonEditor, inferredTypes, untypedIterators);
    boolean nothingToReveal = inferredTypes.isEmpty() && untypedIterators.isEmpty();
    return !nothingToReveal;
}
/**
 * Rewrites the current Ceylon editor's source so that every inferred type
 * found by findCandidatesForRevelation is written out explicitly, adding
 * any imports the revealed type names require, then applies the change to
 * the workspace as a single undoable operation.
 *
 * @param event the command execution event (unused)
 * @return always null, per the AbstractHandler contract
 * @throws ExecutionException if applying the workspace change fails
 */
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
CeylonEditor editor = (CeylonEditor) getCurrentEditor();
Tree.CompilationUnit rootNode = editor.getParseController().getRootNode();
// Declarations whose names appear in revealed types and may need importing.
Set<Declaration> imports = new HashSet<Declaration>();
List<Tree.LocalModifier> localModifiers = new ArrayList<Tree.LocalModifier>();
List<Tree.ValueIterator> valueIterators = new ArrayList<Tree.ValueIterator>();
findCandidatesForRevelation(editor, localModifiers, valueIterators);
if( !localModifiers.isEmpty() || !valueIterators.isEmpty() ) {
// All edits are collected into one TextFileChange so they apply atomically.
TextFileChange tfc = new TextFileChange("Reveal Inferred Types", ((IFileEditorInput) editor.getEditorInput()).getFile());
tfc.setEdit(new MultiTextEdit());
// Replace each "value"/"function" keyword with the inferred type name.
for (Tree.LocalModifier localModifier : localModifiers) {
if( localModifier.getStartIndex() != null && localModifier.getTypeModel() != null ) {
ProducedType pt = localModifier.getTypeModel();
tfc.addEdit(new ReplaceEdit(
localModifier.getStartIndex(),
localModifier.getText().length(),
pt.getProducedTypeName()));
CeylonQuickFixAssistant.importType(imports, pt, rootNode);
}
}
// Prefix each untyped for-loop variable with its inferred type.
for (Tree.ValueIterator valueIterator : valueIterators) {
Tree.Variable variable = valueIterator.getVariable();
if( variable != null
&& variable.getStartIndex() != null
&& variable.getType() != null
&& variable.getType().getTypeModel() != null ) {
ProducedType pt = variable.getType().getTypeModel();
tfc.addEdit(new InsertEdit(
variable.getStartIndex(),
pt.getProducedTypeName() + " "));
CeylonQuickFixAssistant.importType(imports, variable.getType().getTypeModel(), rootNode);
}
}
// Add import statements only after all type edits have been recorded.
CeylonQuickFixAssistant.applyImports(tfc, imports, rootNode);
try {
tfc.initializeValidationData(null);
// Run the change through the workspace so it is validated and undoable.
getWorkspace().run(new PerformChangeOperation(tfc), new NullProgressMonitor());
} catch (CoreException ce) {
throw new ExecutionException("Error reveal inferred types", ce);
}
}
return null;
}
private void findCandidatesForRevelation(CeylonEditor editor, final List<Tree.LocalModifier> localModifiers, final List<Tree.ValueIterator> valueIterators) {
if (editor != null &&
editor.getParseController() != null &&
editor.getParseController().getRootNode() != null &&
editor.getSelectionProvider() != null &&
editor.getSelectionProvider().getSelection() != null) {
final ITextSelection selection = (ITextSelection) editor.getSelectionProvider().getSelection();
final int selectionStart = selection.getOffset();
final int selectionStop = selection.getOffset() + selection.getLength();
editor.getParseController().getRootNode().visit(new Visitor() {
@Override
public void visit(Tree.TypedDeclaration typedDeclaration) {
if( isInSelection(typedDeclaration) ) {
Tree.Type type = typedDeclaration.getType();
if( type instanceof Tree.LocalModifier && type.getToken() != null ) {
localModifiers.add((LocalModifier) type);
}
}
super.visit(typedDeclaration);
}
@Override
public void visit(Tree.ValueIterator valueIterator) {
if (isInSelection(valueIterator)) {
Tree.Variable variable = valueIterator.getVariable();
Tree.Type type = variable.getType();
if (type instanceof Tree.ValueModifier) {
valueIterators.add(valueIterator);
}
}
super.visit(valueIterator);
}
private boolean isInSelection(Node node) {
Integer startIndex = node.getStartIndex();
Integer stopIndex = node.getStopIndex();
if (startIndex != null && stopIndex != null) {
if (selection.getLength() == 0 /* if selection is empty, process whole file */ ||
(startIndex >= selectionStart && stopIndex <= selectionStop) ) {
return true;
}
}
return false;
}
});
}
}
} | plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/code/refactor/RevealInferredTypeHandler.java | package com.redhat.ceylon.eclipse.code.refactor;
import static com.redhat.ceylon.eclipse.code.editor.Util.getCurrentEditor;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.jface.text.ITextSelection;
import org.eclipse.ltk.core.refactoring.PerformChangeOperation;
import org.eclipse.ltk.core.refactoring.TextFileChange;
import org.eclipse.text.edits.InsertEdit;
import org.eclipse.text.edits.MultiTextEdit;
import org.eclipse.text.edits.ReplaceEdit;
import org.eclipse.ui.IFileEditorInput;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.LocalModifier;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.eclipse.code.editor.CeylonEditor;
import com.redhat.ceylon.eclipse.code.quickfix.CeylonQuickFixAssistant;
/**
 * Eclipse command handler that makes inferred types explicit in the current
 * Ceylon editor: every {@code Tree.LocalModifier} with a known type model is
 * replaced by the produced type name, and untyped {@code for}-loop value
 * iterators get an explicit type inserted. Candidates come from the current
 * text selection; an empty selection processes only the node(s) containing
 * the caret (not the whole file).
 */
public class RevealInferredTypeHandler extends AbstractHandler {

    /**
     * Enabled only when the selection (or the node at the caret, for an empty
     * selection) contains at least one inferred type that could be revealed.
     */
    @Override
    public boolean isEnabled() {
        // NOTE(review): assumes the active editor is always a CeylonEditor — confirm
        // that this handler is only contributed for Ceylon editor contexts.
        CeylonEditor editor = (CeylonEditor) getCurrentEditor();
        List<Tree.LocalModifier> localModifiers = new ArrayList<Tree.LocalModifier>();
        List<Tree.ValueIterator> valueIterators = new ArrayList<Tree.ValueIterator>();
        findCandidatesForRevelation(editor, localModifiers, valueIterators);
        return !localModifiers.isEmpty() || !valueIterators.isEmpty();
    }

    /**
     * Collects candidates, builds a single {@link TextFileChange} with one
     * text edit per candidate plus any import additions the revealed type
     * names require, and applies it to the workspace.
     *
     * @return always {@code null}, per the AbstractHandler convention
     * @throws ExecutionException if applying the change to the workspace fails
     */
    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException {
        CeylonEditor editor = (CeylonEditor) getCurrentEditor();
        Tree.CompilationUnit rootNode = editor.getParseController().getRootNode();
        Set<Declaration> imports = new HashSet<Declaration>();
        List<Tree.LocalModifier> localModifiers = new ArrayList<Tree.LocalModifier>();
        List<Tree.ValueIterator> valueIterators = new ArrayList<Tree.ValueIterator>();
        findCandidatesForRevelation(editor, localModifiers, valueIterators);
        if( !localModifiers.isEmpty() || !valueIterators.isEmpty() ) {
            TextFileChange tfc = new TextFileChange("Reveal Inferred Types", ((IFileEditorInput) editor.getEditorInput()).getFile());
            // MultiTextEdit root lets all per-candidate edits be applied atomically.
            tfc.setEdit(new MultiTextEdit());
            for (Tree.LocalModifier localModifier : localModifiers) {
                if( localModifier.getStartIndex() != null && localModifier.getTypeModel() != null ) {
                    ProducedType pt = localModifier.getTypeModel();
                    // Replace the modifier keyword text with the explicit type name.
                    tfc.addEdit(new ReplaceEdit(
                            localModifier.getStartIndex(),
                            localModifier.getText().length(),
                            pt.getProducedTypeName()));
                    CeylonQuickFixAssistant.importType(imports, pt, rootNode);
                }
            }
            for (Tree.ValueIterator valueIterator : valueIterators) {
                Tree.Variable variable = valueIterator.getVariable();
                if( variable != null
                        && variable.getStartIndex() != null
                        && variable.getType() != null
                        && variable.getType().getTypeModel() != null ) {
                    ProducedType pt = variable.getType().getTypeModel();
                    // Iterator variables have no type token to replace, so the
                    // explicit type (plus a space) is inserted before the name.
                    tfc.addEdit(new InsertEdit(
                            variable.getStartIndex(),
                            pt.getProducedTypeName() + " "));
                    CeylonQuickFixAssistant.importType(imports, variable.getType().getTypeModel(), rootNode);
                }
            }
            CeylonQuickFixAssistant.applyImports(tfc, imports, rootNode);
            try {
                tfc.initializeValidationData(null);
                getWorkspace().run(new PerformChangeOperation(tfc), new NullProgressMonitor());
            } catch (CoreException ce) {
                throw new ExecutionException("Error reveal inferred types", ce);
            }
        }
        return null;
    }

    /**
     * Visits the AST of the editor's compilation unit and collects, into the
     * two output lists, every typed declaration using an inferred local
     * modifier and every value iterator without an explicit type that falls
     * inside the current selection.
     *
     * @param editor          the current editor; all preconditions (parse
     *                        controller, root node, selection) are null-checked
     * @param localModifiers  out-parameter: inferred local modifiers found
     * @param valueIterators  out-parameter: untyped value iterators found
     */
    private void findCandidatesForRevelation(CeylonEditor editor, final List<Tree.LocalModifier> localModifiers, final List<Tree.ValueIterator> valueIterators) {
        if (editor != null &&
                editor.getParseController() != null &&
                editor.getParseController().getRootNode() != null &&
                editor.getSelectionProvider() != null &&
                editor.getSelectionProvider().getSelection() != null) {
            ITextSelection selection = (ITextSelection) editor.getSelectionProvider().getSelection();
            final int selectionStart = selection.getOffset();
            final int selectionStop = selection.getOffset() + selection.getLength();
            editor.getParseController().getRootNode().visit(new Visitor() {
                @Override
                public void visit(Tree.TypedDeclaration typedDeclaration) {
                    if( isInSelection(typedDeclaration) ) {
                        Tree.Type type = typedDeclaration.getType();
                        // Only inferred modifiers backed by a real source token qualify.
                        if( type instanceof Tree.LocalModifier && type.getToken() != null ) {
                            localModifiers.add((LocalModifier) type);
                        }
                    }
                    // Keep walking: declarations can nest.
                    super.visit(typedDeclaration);
                }
                @Override
                public void visit(Tree.ValueIterator valueIterator) {
                    if (isInSelection(valueIterator)) {
                        Tree.Variable variable = valueIterator.getVariable();
                        Tree.Type type = variable.getType();
                        if (type instanceof Tree.ValueModifier) {
                            valueIterators.add(valueIterator);
                        }
                    }
                    super.visit(valueIterator);
                }
                /**
                 * Empty selection (caret only): the node qualifies when it
                 * contains the caret offset. Non-empty selection: the node
                 * must lie entirely within the selected range.
                 */
                private boolean isInSelection(Node node) {
                    Integer startIndex = node.getStartIndex();
                    Integer stopIndex = node.getStopIndex();
                    if (startIndex != null && stopIndex != null) {
                        if (selectionStart == selectionStop) {
                            if (startIndex <= selectionStart && stopIndex >= selectionStart) {
                                return true;
                            }
                        } else {
                            if (startIndex >= selectionStart && stopIndex <= selectionStop) {
                                return true;
                            }
                        }
                    }
                    return false;
                }
            });
        }
    }
} | #533 Reveal inferred types (in whole files) | plugins/com.redhat.ceylon.eclipse.ui/src/com/redhat/ceylon/eclipse/code/refactor/RevealInferredTypeHandler.java | #533 Reveal inferred types (in whole files)
|
Java | agpl-3.0 | 3f6c358859b91d24ee96912acea040537143610b | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 7795044c-2e60-11e5-9284-b827eb9e62be | hello.java | 778f9caa-2e60-11e5-9284-b827eb9e62be | 7795044c-2e60-11e5-9284-b827eb9e62be | hello.java | 7795044c-2e60-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | d1d7c51cfbe3eae3054a1e0346a4855a05c1f65d | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | fc60f758-2e60-11e5-9284-b827eb9e62be | hello.java | fc5b85f2-2e60-11e5-9284-b827eb9e62be | fc60f758-2e60-11e5-9284-b827eb9e62be | hello.java | fc60f758-2e60-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | 8aa396727b14f60853879187a7b3a0c97690a100 | 0 | PaulKh/scale-proactive,paraita/programming,acontes/programming,jrochas/scale-proactive,PaulKh/scale-proactive,fviale/programming,lpellegr/programming,fviale/programming,jrochas/scale-proactive,fviale/programming,jrochas/scale-proactive,mnip91/programming-multiactivities,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,mnip91/proactive-component-monitoring,PaulKh/scale-proactive,jrochas/scale-proactive,mnip91/programming-multiactivities,acontes/programming,acontes/programming,fviale/programming,ow2-proactive/programming,fviale/programming,acontes/programming,paraita/programming,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,acontes/programming,acontes/programming,jrochas/scale-proactive,mnip91/proactive-component-monitoring,mnip91/programming-multiactivities,PaulKh/scale-proactive,ow2-proactive/programming,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,ow2-proactive/programming,ow2-proactive/programming,PaulKh/scale-proactive,acontes/programming,jrochas/scale-proactive,ow2-proactive/programming,lpellegr/programming,lpellegr/programming,PaulKh/scale-proactive,ow2-proactive/programming,mnip91/proactive-component-monitoring,lpellegr/programming,paraita/programming,mnip91/programming-multiactivities,jrochas/scale-proactive,fviale/programming,lpellegr/programming,paraita/programming,paraita/programming,lpellegr/programming,paraita/programming | /*
* ################################################################
*
* ProActive: The Java(TM) library for Parallel, Distributed,
* Concurrent computing with Security and Mobility
*
* Copyright (C) 1997-2009 INRIA/University of Nice-Sophia Antipolis
* Contact: [email protected]
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* Initial developer(s): The ActiveEon Team
* http://www.activeeon.com/
* Contributor(s):
*
*
* ################################################################
* $$ACTIVEEON_INITIAL_DEV$$
*/
package org.objectweb.proactive.extra.messagerouting.router;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.util.SweetCountDownLatch;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
import org.objectweb.proactive.extra.messagerouting.protocol.AgentID;
import org.objectweb.proactive.extra.messagerouting.protocol.message.ErrorMessage;
import org.objectweb.proactive.extra.messagerouting.protocol.message.ErrorMessage.ErrorType;
/**
*
* @since ProActive 4.1.0
*/
public class RouterImpl extends RouterInternal implements Runnable {
    public static final Logger logger = ProActiveLogger.getLogger(Loggers.FORWARDING_ROUTER);

    /** Read {@link ByteBuffer} size. */
    private final static int READ_BUFFER_SIZE = 4096;

    /** True is the router must stop or is stopped*/
    private final AtomicBoolean stopped = new AtomicBoolean(false);
    /** Can pass when the router has been successfully shutdown */
    private final SweetCountDownLatch isStopped = new SweetCountDownLatch(1);
    /** The thread running the select loop */
    private final AtomicReference<Thread> selectThread = new AtomicReference<Thread>();

    /** Thread pool used to execute all asynchronous tasks */
    private final ExecutorService tpe;

    /** All the clients known by {@link AgentID}*/
    private final ConcurrentHashMap<AgentID, Client> clientMap = new ConcurrentHashMap<AgentID, Client>();

    /** The local InetAddress on which the router is listening */
    private InetAddress inetAddress;
    /** The local TCP port on which the router is listening */
    private int port;

    private Selector selector = null;
    private ServerSocketChannel ssc = null;
    private ServerSocket serverSocket = null;

    /** Create a new router
     *
     * When a new router is created it binds onto the given port.
     *
     * To start handling connections, a thread MUST be spawned by the caller.
     * <code>
     *        RouterImpl this.router = new RouterImpl(0);
     *        Thread t = new Thread(router);
     *        t.setDaemon(true);
     *        t.setName("Router");
     *        t.start();
     * </code>
     *
     * @param port port number to bind to
     * @throws IOException if the router failed to bind
     */
    RouterImpl(RouterConfig config) throws IOException {
        init(config);
        tpe = Executors.newFixedThreadPool(config.getNbWorkerThreads());
    }

    /**
     * Opens the selector, binds the non-blocking server socket to the
     * configured address/port (resolving wildcard/ephemeral values back into
     * {@link #inetAddress} and {@link #port}) and registers it for ACCEPT.
     *
     * @throws IOException if the socket cannot be bound
     */
    private void init(RouterConfig config) throws IOException {
        // Create a new selector
        selector = Selector.open();

        // Open a listener on the right port
        ssc = ServerSocketChannel.open();
        ssc.configureBlocking(false);
        serverSocket = ssc.socket();

        this.inetAddress = config.getInetAddress();
        this.port = config.getPort();
        InetSocketAddress isa = new InetSocketAddress(this.inetAddress, this.port);
        // Re-read the resolved address and the effective port (may have been 0).
        this.inetAddress = isa.getAddress();
        serverSocket.bind(isa);
        this.port = serverSocket.getLocalPort();
        logger.info("Message router listening on " + serverSocket.toString());

        // register the listener with the selector
        ssc.register(selector, SelectionKey.OP_ACCEPT);
    }

    /**
     * The select loop: dispatches ACCEPT and READ events until {@link #stop()}
     * is called, then releases all resources. Only one thread may ever run
     * this; a second caller is rejected and logged.
     */
    public void run() {
        // Claim the select-thread slot atomically so stop() can interrupt us.
        boolean r = this.selectThread.compareAndSet(null, Thread.currentThread());
        if (r == false) {
            logger.error("A select thread has already been started, aborting the current thread ",
                    new Exception());
            return;
        }

        Set<SelectionKey> selectedKeys = null;
        Iterator<SelectionKey> it;
        SelectionKey key;

        while (this.stopped.get() == false) {
            // select new keys
            try {
                selector.select();
                selectedKeys = selector.selectedKeys();
                it = selectedKeys.iterator();
                while (it.hasNext()) {
                    key = (SelectionKey) it.next();
                    // Keys must be removed by hand; the selector never clears them.
                    it.remove();
                    if ((key.readyOps() & SelectionKey.OP_ACCEPT) == SelectionKey.OP_ACCEPT) {
                        this.handleAccept(key);
                    } else if ((key.readyOps() & SelectionKey.OP_READ) == SelectionKey.OP_READ) {
                        this.handleRead(key);
                    } else {
                        logger.warn("Unhandled SelectionKey operation");
                    }
                }

            } catch (IOException e) {
                logger.warn("Select failed", e);
            }
        }

        this.cleanup();
    }

    /** Stop the router and free all resources*/
    private void cleanup() {
        tpe.shutdown();

        for (Client client : clientMap.values()) {
            client.discardAttachment();
        }

        try {
            /* Not sure if we have to set the attachments to null
             * Possible memory leak
             */
            this.ssc.socket().close();
            this.ssc.close();

            this.selector.close();
        } catch (IOException e) {
            ProActiveLogger.logEatedException(logger, e);
        }
        // Unblocks any thread waiting in stop().
        this.isStopped.countDown();
    }

    /** Accept a new connection */
    private void handleAccept(SelectionKey key) {
        SocketChannel sc;
        try {
            sc = ((ServerSocketChannel) key.channel()).accept();
            sc.configureBlocking(false);

            // Add the new connection to the selector
            sc.register(selector, SelectionKey.OP_READ);
        } catch (IOException e) {
            logger.warn("Failed to accept a new connection", e);
        }
    }

    /** Read available data for this key */
    private void handleRead(SelectionKey key) {
        SocketChannel sc;
        ByteBuffer buffer = ByteBuffer.allocate(READ_BUFFER_SIZE);

        sc = (SocketChannel) key.channel();

        // Lazily create the per-connection attachment on the first read.
        Attachment attachment = (Attachment) key.attachment();
        if (attachment == null) {
            attachment = new Attachment(this, sc);
            key.attach(attachment);
        }

        // Read all the data available
        try {
            int byteRead;
            do {
                buffer.clear();
                byteRead = sc.read(buffer);
                buffer.flip();

                if (byteRead > 0) {
                    // Push partial reads into the assembler; it reconstructs
                    // whole protocol messages from the byte stream.
                    MessageAssembler assembler = attachment.getAssembler();
                    assembler.pushBuffer(buffer);
                }
            } while (byteRead > 0);

            if (byteRead == -1) {
                // EOF: the peer closed the connection.
                clientDisconnected(key);
            }
        } catch (IOException e) {
            clientDisconnected(key);
        } catch (IllegalStateException e) {
            // Disconnect the client to avoid a disaster
            clientDisconnected(key);
        }
    }

    /** clean everything when a client disconnect */
    private void clientDisconnected(SelectionKey key) {
        Attachment attachment = (Attachment) key.attachment();

        key.cancel();
        key.attach(null);
        SocketChannel sc = (SocketChannel) key.channel();
        try {
            sc.socket().close();
        } catch (IOException e) {
            // Miam Miam Miam
            ProActiveLogger.logEatedException(logger, e);
        }

        try {
            sc.close();
        } catch (IOException e) {
            // Miam Miam Miam
            ProActiveLogger.logEatedException(logger, e);
        }

        Client client = attachment.getClient();
        if (client != null) {
            client.discardAttachment();

            // Broadcast the disconnection to every client
            // If client is null, then the handshake has not completed and we
            // don't need to broadcast the disconnection
            AgentID disconnectedAgent = client.getAgentId();
            Collection<Client> clients = clientMap.values();
            tpe.submit(new DisconnectionBroadcaster(clients, disconnectedAgent));
        }
        logger.debug("Client " + sc.socket() + " disconnected");
    }

    /* @@@@@@@@@@ ROUTER PACKAGE INTERFACE
     *
     * Theses methods cannot be package private due to the processor sub package
     */

    /** Hands a received message to the worker pool for processing. */
    public void handleAsynchronously(ByteBuffer message, Attachment attachment) {
        TopLevelProcessor tlp = new TopLevelProcessor(message, attachment, this);
        tpe.execute(tlp);
    }

    /** @return the client registered for this agent, or null if unknown */
    public Client getClient(AgentID agentId) {
        // NOTE(review): clientMap is a ConcurrentHashMap, so this extra
        // synchronization looks redundant for a plain get — confirm whether a
        // compound get/put invariant elsewhere relies on this lock.
        synchronized (clientMap) {
            return clientMap.get(agentId);
        }
    }

    /** Registers a client under its agent id (replacing any previous entry). */
    public void addClient(Client client) {
        synchronized (clientMap) {
            clientMap.put(client.getAgentId(), client);
        }
    }

    /* @@@@@@@@@@ ROUTER PUBLIC INTERFACE: Router */

    /** @return the effective local TCP port the router is bound to */
    public int getPort() {
        return this.port;
    }

    /** @return the resolved local address the router is bound to */
    public InetAddress getInetAddr() {
        return this.inetAddress;
    }

    /**
     * Requests shutdown and blocks until the select loop has cleaned up.
     * Interrupting the select thread wakes it from {@code selector.select()}.
     *
     * @throws IllegalStateException if the router was already stopped
     */
    public void stop() {
        if (this.stopped.get() == true)
            throw new IllegalStateException("Router already stopped");

        this.stopped.set(true);

        Thread t = this.selectThread.get();
        if (t != null) {
            t.interrupt();
            this.isStopped.await();
        }
    }

    /**
     * Async task that notifies every known client that one agent disconnected,
     * by sending each of them an ERR_DISCONNECTION_BROADCAST error message.
     * The client list is snapshotted at construction time.
     */
    private static class DisconnectionBroadcaster implements Runnable {
        final private List<Client> clients;
        final private AgentID disconnectedAgent;

        public DisconnectionBroadcaster(Collection<Client> clients, AgentID disconnectedAgent) {
            this.clients = new ArrayList<Client>(clients);
            this.disconnectedAgent = disconnectedAgent;
        }

        public void run() {
            for (Client client : this.clients) {
                ErrorMessage error = new ErrorMessage(ErrorType.ERR_DISCONNECTION_BROADCAST, client
                        .getAgentId(), this.disconnectedAgent, 0);
                try {
                    client.sendMessage(error.toByteArray());
                } catch (Exception e) {
                    // Best effort: a failure to notify one client must not
                    // prevent notifying the others.
                    ProActiveLogger.logEatedException(logger, e);
                }
            }
        }
    }
}
| src/Extra/org/objectweb/proactive/extra/messagerouting/router/RouterImpl.java | /*
* ################################################################
*
* ProActive: The Java(TM) library for Parallel, Distributed,
* Concurrent computing with Security and Mobility
*
* Copyright (C) 1997-2009 INRIA/University of Nice-Sophia Antipolis
* Contact: [email protected]
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* Initial developer(s): The ActiveEon Team
* http://www.activeeon.com/
* Contributor(s):
*
*
* ################################################################
* $$ACTIVEEON_INITIAL_DEV$$
*/
package org.objectweb.proactive.extra.messagerouting.router;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.util.SweetCountDownLatch;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
import org.objectweb.proactive.extra.messagerouting.protocol.AgentID;
import org.objectweb.proactive.extra.messagerouting.protocol.message.ErrorMessage;
import org.objectweb.proactive.extra.messagerouting.protocol.message.ErrorMessage.ErrorType;
/**
*
* @since ProActive 4.1.0
*/
public class RouterImpl extends RouterInternal implements Runnable {
    public static final Logger logger = ProActiveLogger.getLogger(Loggers.FORWARDING_ROUTER);

    /** Read {@link ByteBuffer} size. */
    private final static int READ_BUFFER_SIZE = 4096;

    /** True is the router must stop or is stopped*/
    private final AtomicBoolean stopped = new AtomicBoolean(false);
    /** Can pass when the router has been successfully shutdown */
    private final SweetCountDownLatch isStopped = new SweetCountDownLatch(1);
    /** The thread running the select loop */
    private final AtomicReference<Thread> selectThread = new AtomicReference<Thread>();

    /** Thread pool used to execute all asynchronous tasks */
    private final ExecutorService tpe;

    /** All the clients known by {@link AgentID}*/
    private final ConcurrentHashMap<AgentID, Client> clientMap = new ConcurrentHashMap<AgentID, Client>();

    /** The local InetAddress on which the router is listening */
    private InetAddress inetAddress;
    /** The local TCP port on which the router is listening */
    private int port;

    private Selector selector = null;
    private ServerSocketChannel ssc = null;
    private ServerSocket serverSocket = null;

    /** Create a new router
     *
     * When a new router is created it binds onto the given port.
     *
     * To start handling connections, a thread MUST be spawned by the caller.
     * <code>
     *        RouterImpl this.router = new RouterImpl(0);
     *        Thread t = new Thread(router);
     *        t.setDaemon(true);
     *        t.setName("Router");
     *        t.start();
     * </code>
     *
     * @param port port number to bind to
     * @throws IOException if the router failed to bind
     */
    RouterImpl(RouterConfig config) throws IOException {
        init(config);
        tpe = Executors.newFixedThreadPool(config.getNbWorkerThreads());
    }

    /**
     * Opens the selector, binds the non-blocking server socket to the
     * configured address/port (resolving wildcard/ephemeral values back into
     * {@link #inetAddress} and {@link #port}) and registers it for ACCEPT.
     *
     * @throws IOException if the socket cannot be bound
     */
    private void init(RouterConfig config) throws IOException {
        // Create a new selector
        selector = Selector.open();

        // Open a listener on the right port
        ssc = ServerSocketChannel.open();
        ssc.configureBlocking(false);
        serverSocket = ssc.socket();

        this.inetAddress = config.getInetAddress();
        this.port = config.getPort();
        InetSocketAddress isa = new InetSocketAddress(this.inetAddress, this.port);
        // Re-read the resolved address and the effective port (may have been 0).
        this.inetAddress = isa.getAddress();
        serverSocket.bind(isa);
        this.port = serverSocket.getLocalPort();
        logger.info("Message router listening on " + serverSocket.toString());

        // register the listener with the selector
        ssc.register(selector, SelectionKey.OP_ACCEPT);
    }

    /**
     * The select loop: dispatches ACCEPT and READ events until {@link #stop()}
     * is called, then releases all resources. Only one thread may ever run
     * this; a second caller is rejected and logged.
     */
    public void run() {
        // Claim the select-thread slot atomically so stop() can interrupt us.
        boolean r = this.selectThread.compareAndSet(null, Thread.currentThread());
        if (r == false) {
            logger.error("A select thread has already been started, aborting the current thread ",
                    new Exception());
            return;
        }

        Set<SelectionKey> selectedKeys = null;
        Iterator<SelectionKey> it;
        SelectionKey key;

        while (this.stopped.get() == false) {
            // select new keys
            try {
                selector.select();
                selectedKeys = selector.selectedKeys();
                it = selectedKeys.iterator();
                while (it.hasNext()) {
                    key = (SelectionKey) it.next();
                    // Keys must be removed by hand; the selector never clears them.
                    it.remove();
                    if ((key.readyOps() & SelectionKey.OP_ACCEPT) == SelectionKey.OP_ACCEPT) {
                        this.handleAccept(key);
                    } else if ((key.readyOps() & SelectionKey.OP_READ) == SelectionKey.OP_READ) {
                        this.handleRead(key);
                    } else {
                        logger.warn("Unhandled SelectionKey operation");
                    }
                }

            } catch (IOException e) {
                logger.warn("Select failed", e);
            }
        }

        this.cleanup();
    }

    /** Stop the router and free all resources*/
    private void cleanup() {
        tpe.shutdown();

        for (Client client : clientMap.values()) {
            client.discardAttachment();
        }

        try {
            /* Not sure if we have to set the attachments to null
             * Possible memory leak
             */
            this.ssc.socket().close();
            this.ssc.close();

            this.selector.close();
        } catch (IOException e) {
            ProActiveLogger.logEatedException(logger, e);
        }
        // Unblocks any thread waiting in stop().
        this.isStopped.countDown();
    }

    /** Accept a new connection */
    private void handleAccept(SelectionKey key) {
        SocketChannel sc;
        try {
            sc = ((ServerSocketChannel) key.channel()).accept();
            sc.configureBlocking(false);

            // Add the new connection to the selector
            sc.register(selector, SelectionKey.OP_READ);
        } catch (IOException e) {
            logger.warn("Failed to accept a new connection", e);
        }
    }

    /** Read available data for this key */
    private void handleRead(SelectionKey key) {
        SocketChannel sc;
        ByteBuffer buffer = ByteBuffer.allocate(READ_BUFFER_SIZE);

        sc = (SocketChannel) key.channel();

        // Lazily create the per-connection attachment on the first read.
        Attachment attachment = (Attachment) key.attachment();
        if (attachment == null) {
            attachment = new Attachment(this, sc);
            key.attach(attachment);
        }

        // Read all the data available
        try {
            int byteRead;
            do {
                buffer.clear();
                byteRead = sc.read(buffer);
                buffer.flip();

                if (byteRead > 0) {
                    // Push partial reads into the assembler; it reconstructs
                    // whole protocol messages from the byte stream.
                    MessageAssembler assembler = attachment.getAssembler();
                    assembler.pushBuffer(buffer);
                }
            } while (byteRead > 0);

            if (byteRead == -1) {
                // EOF: the peer closed the connection.
                clientDisconnected(key);
            }
        } catch (IOException e) {
            clientDisconnected(key);
        } catch (IllegalStateException e) {
            // Disconnect the client to avoid a disaster
            clientDisconnected(key);
        }
    }

    /**
     * Cleans everything up when a client disconnects.
     *
     * BUGFIX: the disconnection broadcast previously dereferenced
     * attachment.getClient() unconditionally, throwing a NullPointerException
     * whenever a peer dropped the connection before completing the handshake
     * (the client is only attached once the handshake succeeds). The broadcast
     * is now performed only when a registered client exists; for a
     * pre-handshake peer there is nothing to announce anyway.
     */
    private void clientDisconnected(SelectionKey key) {
        Attachment attachment = (Attachment) key.attachment();

        key.cancel();
        key.attach(null);
        SocketChannel sc = (SocketChannel) key.channel();
        try {
            sc.socket().close();
        } catch (IOException e) {
            // Miam Miam Miam
            ProActiveLogger.logEatedException(logger, e);
        }

        try {
            sc.close();
        } catch (IOException e) {
            // Miam Miam Miam
            ProActiveLogger.logEatedException(logger, e);
        }

        Client client = attachment.getClient();
        if (client != null) {
            client.discardAttachment();

            // Broadcast the disconnection to every client. If client is null
            // the handshake never completed, so no other agent knows about
            // this peer and no broadcast is needed.
            AgentID disconnectedAgent = client.getAgentId();
            Collection<Client> clients = clientMap.values();
            tpe.submit(new DisconnectionBroadcaster(clients, disconnectedAgent));
        }
        logger.debug("Client " + sc.socket() + " disconnected");
    }

    /* @@@@@@@@@@ ROUTER PACKAGE INTERFACE
     *
     * Theses methods cannot be package private due to the processor sub package
     */

    /** Hands a received message to the worker pool for processing. */
    public void handleAsynchronously(ByteBuffer message, Attachment attachment) {
        TopLevelProcessor tlp = new TopLevelProcessor(message, attachment, this);
        tpe.execute(tlp);
    }

    /** @return the client registered for this agent, or null if unknown */
    public Client getClient(AgentID agentId) {
        synchronized (clientMap) {
            return clientMap.get(agentId);
        }
    }

    /** Registers a client under its agent id (replacing any previous entry). */
    public void addClient(Client client) {
        synchronized (clientMap) {
            clientMap.put(client.getAgentId(), client);
        }
    }

    /* @@@@@@@@@@ ROUTER PUBLIC INTERFACE: Router */

    /** @return the effective local TCP port the router is bound to */
    public int getPort() {
        return this.port;
    }

    /** @return the resolved local address the router is bound to */
    public InetAddress getInetAddr() {
        return this.inetAddress;
    }

    /**
     * Requests shutdown and blocks until the select loop has cleaned up.
     * Interrupting the select thread wakes it from {@code selector.select()}.
     *
     * @throws IllegalStateException if the router was already stopped
     */
    public void stop() {
        if (this.stopped.get() == true)
            throw new IllegalStateException("Router already stopped");

        this.stopped.set(true);

        Thread t = this.selectThread.get();
        if (t != null) {
            t.interrupt();
            this.isStopped.await();
        }
    }

    /**
     * Async task that notifies every known client that one agent disconnected,
     * by sending each of them an ERR_DISCONNECTION_BROADCAST error message.
     * The client list is snapshotted at construction time.
     */
    private static class DisconnectionBroadcaster implements Runnable {
        final private List<Client> clients;
        final private AgentID disconnectedAgent;

        public DisconnectionBroadcaster(Collection<Client> clients, AgentID disconnectedAgent) {
            this.clients = new ArrayList<Client>(clients);
            this.disconnectedAgent = disconnectedAgent;
        }

        public void run() {
            for (Client client : this.clients) {
                ErrorMessage error = new ErrorMessage(ErrorType.ERR_DISCONNECTION_BROADCAST, client
                        .getAgentId(), this.disconnectedAgent, 0);
                try {
                    client.sendMessage(error.toByteArray());
                } catch (Exception e) {
                    // Best effort: a failure to notify one client must not
                    // prevent notifying the others.
                    ProActiveLogger.logEatedException(logger, e);
                }
            }
        }
    }
}
| Fixed PROACTIVE-672.
Broadcast of the diconnection must only be performed if the handshake
has completed. Otherwise the connection can just be closed.
git-svn-id: 9146c88ff6d39b48099bf954d15d68f687b3fa69@12675 28e8926c-6b08-0410-baaa-805c5e19b8d6
| src/Extra/org/objectweb/proactive/extra/messagerouting/router/RouterImpl.java | Fixed PROACTIVE-672. |
|
Java | lgpl-2.1 | d20eae75708dbd6f3fdb0b608d7e79753db8305c | 0 | justincc/intermine,drhee/toxoMine,JoeCarlson/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,kimrutherford/intermine,justincc/intermine,kimrutherford/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,kimrutherford/intermine,zebrafishmine/intermine,kimrutherford/intermine,elsiklab/intermine,drhee/toxoMine,JoeCarlson/intermine,tomck/intermine,julie-sullivan/phytomine,julie-sullivan/phytomine,kimrutherford/intermine,drhee/toxoMine,elsiklab/intermine,JoeCarlson/intermine,joshkh/intermine,JoeCarlson/intermine,drhee/toxoMine,zebrafishmine/intermine,JoeCarlson/intermine,justincc/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,justincc/intermine,zebrafishmine/intermine,justincc/intermine,tomck/intermine,justincc/intermine,tomck/intermine,tomck/intermine,drhee/toxoMine,julie-sullivan/phytomine,zebrafishmine/intermine,tomck/intermine,julie-sullivan/phytomine,elsiklab/intermine,zebrafishmine/intermine,julie-sullivan/phytomine,elsiklab/intermine,elsiklab/intermine,justincc/intermine,joshkh/intermine,joshkh/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,elsiklab/intermine,zebrafishmine/intermine,drhee/toxoMine,drhee/toxoMine,JoeCarlson/intermine,justincc/intermine,tomck/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,JoeCarlson/intermine,joshkh/intermine,zebrafishmine/intermine,zebrafishmine/intermine,JoeCarlson/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,zebrafishmine/intermine,elsiklab/intermine,julie-sullivan/phytomine,drhee/toxoMine,elsiklab/intermine,kimrutherford/intermine,kimrutherford/intermine,JoeCarlson/intermine,julie-sullivan/phytomine,joshkh/intermine,joshkh/intermine,Arabidopsis-Information-Portal/interm
ine | intermine/src/java/org/intermine/objectstore/Results.java | package org.flymine.objectstore;
import java.util.Collection;
/**
 * The results of some Query being run in an ObjectStore.
 *
 * Marker interface: declares no members beyond those inherited from
 * {@link java.util.Collection}, so any Collection-typed view over the
 * query results can implement it.
 *
 * @author Andrew Varley
 */
public interface Results extends Collection
{
}
| Now resides in the query subpackage
| intermine/src/java/org/intermine/objectstore/Results.java | Now resides in the query subpackage |
||
Java | lgpl-2.1 | 996a218b2b2f3fa36e7a99f98bf46fabd6c0d790 | 0 | deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3 | //$HeadURL$
/*---------------- FILE HEADER ------------------------------------------
This file is part of deegree.
Copyright (C) 2001-2009 by:
Department of Geography, University of Bonn
http://www.giub.uni-bonn.de/deegree/
lat/lon GmbH
http://www.lat-lon.de
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact:
Andreas Poth
lat/lon GmbH
Aennchenstr. 19
53177 Bonn
Germany
E-Mail: [email protected]
Prof. Dr. Klaus Greve
Department of Geography
University of Bonn
Meckenheimer Allee 166
53115 Bonn
Germany
E-Mail: [email protected]
---------------------------------------------------------------------------*/
package org.deegree.services.wps.ap.wcts;
import static org.deegree.commons.utils.StringUtils.isSet;
import static org.deegree.gml.GMLVersion.GML_31;
import static org.deegree.gml.GMLVersion.fromMimeType;
import static org.slf4j.LoggerFactory.getLogger;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import org.deegree.commons.tom.ows.CodeType;
import org.deegree.commons.utils.FileUtils;
import org.deegree.commons.xml.XMLParsingException;
import org.deegree.cs.coordinatesystems.ICRS;
import org.deegree.cs.exceptions.OutsideCRSDomainException;
import org.deegree.cs.exceptions.TransformationException;
import org.deegree.cs.exceptions.UnknownCRSException;
import org.deegree.cs.persistence.CRSManager;
import org.deegree.cs.transformations.Transformation;
import org.deegree.gml.GMLVersion;
import org.deegree.gml.utils.XMLTransformer;
import org.deegree.protocol.ows.exception.OWSException;
import org.deegree.protocol.wps.ap.wcts.WCTSConstants;
import org.deegree.services.wps.ExceptionAwareProcesslet;
import org.deegree.services.wps.ExceptionCustomizer;
import org.deegree.services.wps.ProcessletException;
import org.deegree.services.wps.ProcessletExecutionInfo;
import org.deegree.services.wps.ProcessletInputs;
import org.deegree.services.wps.ProcessletOutputs;
import org.deegree.services.wps.input.ComplexInput;
import org.deegree.services.wps.input.LiteralInput;
import org.deegree.services.wps.output.ComplexOutput;
import org.slf4j.Logger;
/**
* The <code>TransformCoordinates</code> class TODO add class documentation here.
*
* @author <a href="mailto:[email protected]">Rutger Bezema</a>
* @author last edited by: $Author$
* @version $Revision$, $Date$
*
*/
public class TransformCoordinates implements ExceptionAwareProcesslet {
// default GML version assumed when the input mime type does not determine one;
// initialized once in the static block below from an optional properties file
private final static GMLVersion defaultGML;

// if true, the stricter INSPIRE parameter rules apply (mutual exclusions etc.);
// set to false in init() — TODO confirm whether this is ever meant to be configurable
private boolean inspireCompliant;

private static final Logger LOG = getLogger( TransformCoordinates.class );

// identifiers of the process' input/output parameters (must match the process description)
final static String IN_INPUTDATA = "InputData";

final static String IN_TEST_TRANSFORM = "TestTransformation";

final static String IN_TRANSFORM = "Transformation";

final static String IN_SCRS = "SourceCRS";

final static String IN_TCRS = "TargetCRS";

final static String OUT_DATA = "TransformedData";

static {
    // resolve the default GML version from the optional wcts-configuration.properties file;
    // falls back to GML 3.1 if the file is absent, unreadable, or its GML_VERSION value
    // cannot be mapped onto a GMLVersion enum constant
    URL config = FileUtils.loadDeegreeConfiguration( TransformCoordinates.class, "wcts-configuration.properties" );
    GMLVersion configuredVersion = GML_31;
    if ( config != null ) {
        Properties props = new Properties();
        try {
            props.load( config.openStream() );
            String gmlVersion = props.getProperty( "GML_VERSION" );
            if ( gmlVersion != null ) {
                try {
                    // enum constants are upper case, the config value may not be
                    configuredVersion = GMLVersion.valueOf( gmlVersion.toUpperCase() );
                } catch ( Exception e ) {
                    LOG.debug( "Your gml version: " + gmlVersion + " could not be mapped, it should be one of: "
                               + Arrays.toString( GMLVersion.values() ) );
                }
            }
        } catch ( IOException e ) {
            // log the stack trace only on debug level to keep normal logs readable
            if ( LOG.isDebugEnabled() ) {
                LOG.debug( "Could not load configuration: " + e.getMessage(), e );
            } else {
                LOG.error( "Could not load configuration: " + e.getMessage() );
            }
        }
    }
    defaultGML = configuredVersion;
}
/**
 * No resources are held by this processlet, so nothing needs to be released.
 */
@Override
public void destroy() {
    // no-op
}
/**
 * Initializes the processlet; the INSPIRE-specific parameter restrictions are disabled.
 */
@Override
public void init() {
    inspireCompliant = false;
}
/**
 * Executes the coordinate transformation process: reads the GML input stream, validates that the
 * requested output mime type / schema match the input (as the INSPIRE directive demands), evaluates the
 * crs / transformation parameters and streams the transformed data to the output.
 *
 * @param in the process inputs (InputData, SourceCRS, TargetCRS, Transformation, TestTransformation)
 * @param out the process outputs (TransformedData)
 * @param info execution info (unused here)
 * @throws ProcessletException if the input data is missing, the output format differs from the input
 *             format, the parameters are invalid, or the transformation / output writing fails
 */
@Override
public void process( ProcessletInputs in, ProcessletOutputs out, ProcessletExecutionInfo info )
                        throws ProcessletException {
    // required by description
    ComplexInput xmlInput = (ComplexInput) in.getParameter( IN_INPUTDATA );
    String mime = xmlInput.getMimeType();
    String inSchema = xmlInput.getSchema();
    // derive the GML version from the mime type, falling back to the configured default
    GMLVersion gmlVersion = fromMimeType( mime, defaultGML );
    XMLStreamReader inputData = null;
    try {
        inputData = xmlInput.getValueAsXMLStream();
        // StAXParsingHelper.nextElement( inputData );
    } catch ( IOException e ) {
        // all three failures below are reported via the null check on inputData; stack traces only on debug
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( "Exception while getting stream from input data: " + e.getMessage(), e );
        } else {
            LOG.error( "Exception while getting stream from input data: " + e.getMessage() );
        }
    } catch ( XMLStreamException e ) {
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( "Exception while getting stream from input data: " + e.getMessage(), e );
        } else {
            LOG.error( "Exception while getting stream from input data: " + e.getMessage() );
        }
    } catch ( NullPointerException e ) {
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( "Exception (no next element) while getting stream from input data: " + e.getMessage(), e );
        } else {
            LOG.error( "Exception (no next element) while getting stream from input data: " + e.getMessage() );
        }
    }
    if ( inputData == null ) {
        StringBuilder sb = new StringBuilder( "No input data given." );
        String execCode = WCTSConstants.ExceptionCodes.NoInputData.name();
        throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
    }
    InputParams evaluatedInput = evaluateInput( in );
    ComplexOutput xmlOutput = (ComplexOutput) out.getParameter( OUT_DATA );
    String outMime = xmlOutput.getRequestedMimeType();
    // output mime type must equal the input mime type (INSPIRE requirement)
    if ( outMime != null && !outMime.equals( mime ) ) {
        throw new ProcessletException(
                                       new OWSException(
                                                         "The inspire directive specifies that the output schema equals the input schema, therefore the mimetypes of the incoming data ("
                                                                                 + mime
                                                                                 + " and the requested (transformed) outgoing data ("
                                                                                 + outMime + " must be equal.",
                                                         OWSException.INVALID_PARAMETER_VALUE ) );
    }
    String outSchema = xmlOutput.getRequestedSchema();
    // output schema must equal the input schema (INSPIRE requirement)
    if ( outSchema != null && !outSchema.equals( inSchema ) ) {
        throw new ProcessletException(
                                       new OWSException( "The inspire directive specifies that the input schema ( "
                                                         + inSchema + ") and the output schema (" + outSchema
                                                         + ") must be equal.", OWSException.INVALID_PARAMETER_VALUE ) );
    }
    LOG.debug( "Setting XML output (requested=" + xmlOutput.isRequested() + ")" );
    XMLStreamWriter writer = null;
    try {
        writer = xmlOutput.getXMLStreamWriter();
        // writer.writeStartDocument();
    } catch ( XMLStreamException e ) {
        LOG.error( e.getMessage() );
        throw new ProcessletException( "Could not create an outputstream." + e.getLocalizedMessage() );
    }
    // result will not be null
    transform( evaluatedInput, inputData, writer, gmlVersion );
    try {
        // write the end document.
        writer.writeEndDocument();
    } catch ( XMLStreamException e ) {
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( "Exception message: " + e.getMessage(), e );
        }
        throw new ProcessletException( e.getLocalizedMessage() );
    }
}
/**
 * Runs the actual GML transformation, mapping each failure type onto the appropriate OWS exception
 * code.
 *
 * @param evaluatedInput the evaluated request parameters (source crs, transformer, optional requested
 *            transformation)
 * @param inputData reader over the GML input data
 * @param writer target for the transformed GML
 * @param gmlVersion version of the incoming (and hence outgoing) GML
 * @throws ProcessletException wrapping the underlying failure; parameter problems are reported with
 *             {@code InvalidParameterValue}, transformation failures with {@code NotTransformable},
 *             out-of-domain data with {@code InvalidArea}
 */
private void transform( InputParams evaluatedInput, XMLStreamReader inputData, XMLStreamWriter writer,
                        GMLVersion gmlVersion )
                        throws ProcessletException {
    try {
        // only pass a transformation list if one specific transformation was requested
        List<Transformation> requestedTransformation = null;
        if ( evaluatedInput.defaultTransform != null ) {
            requestedTransformation = new ArrayList<Transformation>();
            requestedTransformation.add( evaluatedInput.defaultTransform );
        }
        evaluatedInput.transformer.transform( inputData, writer, evaluatedInput.sourceCRS, gmlVersion, true,
                                              requestedTransformation );
    } catch ( XMLParsingException e ) {
        debugException( e );
        throw new ProcessletException( e.getLocalizedMessage() );
    } catch ( IllegalArgumentException e ) {
        debugException( e );
        throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
                                                         OWSException.INVALID_PARAMETER_VALUE ) );
    } catch ( XMLStreamException e ) {
        debugException( e );
        throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
                                                         OWSException.INVALID_PARAMETER_VALUE ) );
    } catch ( UnknownCRSException e ) {
        debugException( e );
        throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
                                                         OWSException.INVALID_PARAMETER_VALUE ) );
    } catch ( TransformationException e ) {
        debugException( e );
        throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
                                                         WCTSConstants.ExceptionCodes.NotTransformable.name() ) );
    } catch ( OutsideCRSDomainException e ) {
        debugException( e );
        throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
                                                         WCTSConstants.ExceptionCodes.InvalidArea.name() ) );
    }
}

/**
 * Logs the given exception's message (and, on debug level, its stack trace). Extracted to avoid
 * repeating the identical guard in every catch clause of {@link #transform}.
 *
 * @param e the exception to log
 */
private void debugException( Exception e ) {
    if ( LOG.isDebugEnabled() ) {
        LOG.debug( "Exception message: " + e.getMessage(), e );
    }
}
/**
 * Evaluates the crs / transformation literal inputs and creates the transformer needed to fulfill the
 * request.
 * <p>
 * The presence of {@link #IN_SCRS}, {@link #IN_TCRS} and {@link #IN_TRANSFORM} is encoded as a bit mask
 * (1, 2, 4), so the switch below handles each of the 8 possible combinations. Which combinations are
 * allowed depends on the {@code inspireCompliant} flag.
 *
 * @param in the processlet inputs to read the literal values from
 * @return the instantiated source crs (may be <code>null</code> if the crs is to be taken from the
 *         geometries), the transformer and the optionally requested transformation
 * @throws ProcessletException if the parameter combination is invalid, references unknown crs /
 *             transformations, or no transformer could be created; also thrown (with code
 *             'Transformable') when only a transformation test was requested and it would succeed
 */
private InputParams evaluateInput( ProcessletInputs in )
                        throws ProcessletException {
    XMLTransformer transformer = null;
    String sCrs = getLiteralInputValue( in, IN_SCRS );
    String tCrs = getLiteralInputValue( in, IN_TCRS );
    String transId = getLiteralInputValue( in, IN_TRANSFORM );
    boolean testTransformation = testTransformation( in );
    // bit mask over the given parameters: 1 = source crs, 2 = target crs, 4 = transformation id
    int val = isSet( sCrs ) ? 1 : 0;
    val += isSet( tCrs ) ? 2 : 0;
    val += isSet( transId ) ? 4 : 0;
    // sb != null after the switch signals an error; execCode carries the matching OWS exception code
    StringBuilder sb = null;
    String execCode = OWSException.MISSING_PARAMETER_VALUE;
    ICRS sourceCRS = null;
    ICRS targetCRS = null;
    Transformation requestedTransform = null;
    switch ( val ) {
    case 0:
        // nothing given at all
        sb = new StringBuilder( "None of, " );
        sb.append( IN_SCRS ).append( ", " ).append( IN_TCRS ).append( " and " ).append( IN_TRANSFORM );
        sb.append( " given." );
        break;
    case 1:
        // only sCrs
        sb = new StringBuilder( "Missing " ).append( IN_TCRS );
        break;
    case 2:
        if ( inspireCompliant ) {
            // only tCRS, the crs must be defined in the geometries.
            sb = new StringBuilder( "Missing " ).append( IN_SCRS ).append( " or " ).append( IN_TRANSFORM );
        } else {
            targetCRS = getCRS( tCrs );
            if ( targetCRS == null ) {
                sb = new StringBuilder();
                execCode = OWSException.INVALID_PARAMETER_VALUE;
                sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
            } else {
                transformer = new XMLTransformer( targetCRS );
            }
        }
        break;
    case 3:
        // source and target crs given
        sourceCRS = getCRS( sCrs );
        targetCRS = getCRS( tCrs );
        if ( targetCRS == null || sourceCRS == null ) {
            sb = new StringBuilder();
            execCode = OWSException.INVALID_PARAMETER_VALUE;
            if ( sourceCRS == null ) {
                sb.append( IN_SCRS ).append( " (" ).append( sCrs ).append( ") references an unknown crs." );
            }
            if ( targetCRS == null ) {
                sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
            }
        } else {
            transformer = new XMLTransformer( targetCRS );
        }
        break;
    case 4:
        // only transformation id; rb: inspire conform is not available.
        sb = new StringBuilder( "No, " );
        sb.append( IN_TCRS ).append( " given, it is required." );
        break;
    case 5:
        // source crs + transformation id
        sb = new StringBuilder( "Invalid combination " ).append( IN_SCRS ).append( " and " ).append( IN_TRANSFORM );
        execCode = OWSException.INVALID_PARAMETER_VALUE;
        break;
    case 6:
        // target crs + transformation id
        targetCRS = getCRS( tCrs );
        requestedTransform = CRSManager.getTransformation( null, transId );
        if ( requestedTransform == null || targetCRS == null ) {
            sb = new StringBuilder();
            execCode = OWSException.INVALID_PARAMETER_VALUE;
            if ( targetCRS == null ) {
                sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
            }
            if ( requestedTransform == null ) {
                sb.append( IN_TRANSFORM ).append( " (" ).append( transId ).append( ") references an unknown transformation." );
            }
        } else {
            // fix: the looked-up crs was previously held in a local variable ('tarCRS') while the
            // still-null 'targetCRS' was passed here, creating a transformer without a target crs
            transformer = new XMLTransformer( targetCRS );
        }
        break;
    case 7:
        if ( inspireCompliant ) {
            // rb: the inspire directive says mutual exclusive.
            sb = new StringBuilder( "Mutual exclusion, " );
            sb.append( IN_SCRS ).append( ", " ).append( IN_TCRS ).append( " and " ).append( IN_TRANSFORM );
            sb.append( " were given, allowed are either: " );
            sb.append( IN_SCRS ).append( " and " ).append( IN_TCRS ).append( " or " );
            sb.append( IN_TRANSFORM ).append( " and " ).append( IN_TCRS );
            execCode = WCTSConstants.ExceptionCodes.MutualExclusionException.name();
        } else {
            sourceCRS = getCRS( sCrs );
            targetCRS = getCRS( tCrs );
            requestedTransform = CRSManager.getTransformation( null, transId );
            if ( targetCRS == null || sourceCRS == null || requestedTransform == null ) {
                sb = new StringBuilder();
                execCode = OWSException.INVALID_PARAMETER_VALUE;
                if ( sourceCRS == null ) {
                    sb.append( IN_SCRS ).append( " (" ).append( sCrs ).append( ") references an unknown crs." );
                }
                if ( targetCRS == null ) {
                    sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
                }
                if ( requestedTransform == null ) {
                    sb.append( IN_TRANSFORM ).append( " (" ).append( transId ).append( ") references an unknown transformation." );
                }
            } else {
                transformer = new XMLTransformer( targetCRS );
            }
        }
        break;
    }
    if ( sb != null ) {
        // an error was detected above; when only a test was requested, report 'NotTransformable' instead
        if ( testTransformation ) {
            execCode = WCTSConstants.ExceptionCodes.NotTransformable.name();
        }
        throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
    }
    if ( inspireCompliant ) {
        // inspire mode additionally requires an explicit source crs
        if ( sourceCRS == null || transformer == null ) {
            if ( testTransformation ) {
                execCode = WCTSConstants.ExceptionCodes.NotTransformable.name();
            } else {
                execCode = WCTSConstants.ExceptionCodes.OperationNotSupported.name();
            }
            sb = new StringBuilder( "Unable to fullfill transformation." );
            if ( sourceCRS == null ) {
                sb.append( IN_SCRS ).append( ", could not be created." );
            }
            if ( transformer == null ) {
                sb.append( "No geometry transformer could be created." );
            }
            throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
        }
    } else {
        if ( transformer == null ) {
            if ( testTransformation ) {
                execCode = WCTSConstants.ExceptionCodes.NotTransformable.name();
            } else {
                execCode = WCTSConstants.ExceptionCodes.OperationNotSupported.name();
            }
            sb = new StringBuilder( "Unable to fullfill transformation." );
            sb.append( "No geometry transformer could be created (e.g. no transformation path available)." );
            throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
        }
    }
    if ( testTransformation ) {
        execCode = WCTSConstants.ExceptionCodes.Transformable.name();
        // rb: wow, the transformation can be applied, but we throw an exception anyway (as the 'spec' says)
        // ;-)
        throw new ProcessletException( new OWSException( "", execCode ) );
    }
    return new InputParams( sourceCRS, transformer, requestedTransform );
}
/**
 * Resolves the given identifier to a coordinate system via the {@link CRSManager}.
 *
 * @param crs identifier of the coordinate system
 * @return the resolved crs, or <code>null</code> if the identifier is unknown
 */
private ICRS getCRS( String crs ) {
    try {
        return CRSManager.lookup( crs );
    } catch ( UnknownCRSException e ) {
        // unknown crs is an expected condition here; callers handle the null result
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( e.getMessage(), e );
        }
        return null;
    }
}
/**
 * Determines whether only a test of the transformation (and no actual output) was requested.
 *
 * @param in the processlet inputs
 * @return true iff the 'TestTransformation' literal is present and parses to <code>true</code>
 */
private boolean testTransformation( ProcessletInputs in ) {
    String value = getLiteralInputValue( in, IN_TEST_TRANSFORM );
    if ( !isSet( value ) ) {
        return false;
    }
    return Boolean.parseBoolean( value );
}
/**
 * Reads the value of the given literal input parameter.
 *
 * @param in the processlet inputs
 * @param paramId identifier of the literal input
 * @return the literal value, or <code>null</code> if the parameter is not present
 */
private String getLiteralInputValue( ProcessletInputs in, String paramId ) {
    LiteralInput param = (LiteralInput) in.getParameter( paramId );
    if ( param == null ) {
        return null;
    }
    return param.getValue();
}
/**
 * Supplies the WCTS-specific exception customizer for this process.
 *
 * @return a customizer bound to the 'TransformCoordinates' process code
 */
@Override
public ExceptionCustomizer getExceptionCustomizer() {
    CodeType processId = new CodeType( "TransformCoordinates" );
    return new org.deegree.services.wps.ap.wcts.ExceptionCustomizer( processId );
}
/**
 * Immutable value holder bundling the evaluated request parameters.
 */
private class InputParams {
    // crs of the input data; may be null when the crs is encoded in the geometries themselves
    final ICRS sourceCRS;
    // transformer created for the requested target crs; non-null (evaluateInput throws otherwise)
    final XMLTransformer transformer;
    // explicitly requested transformation; null if the default transformation chain should be used
    final Transformation defaultTransform;
    InputParams( ICRS sourceCRS, XMLTransformer transformer, Transformation defaultTransform ) {
        this.sourceCRS = sourceCRS;
        this.transformer = transformer;
        this.defaultTransform = defaultTransform;
    }
}
}
| deegree-workspaces/deegree-workspace-wcts/src/main/java/org/deegree/services/wps/ap/wcts/TransformCoordinates.java | //$HeadURL$
/*---------------- FILE HEADER ------------------------------------------
This file is part of deegree.
Copyright (C) 2001-2009 by:
Department of Geography, University of Bonn
http://www.giub.uni-bonn.de/deegree/
lat/lon GmbH
http://www.lat-lon.de
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact:
Andreas Poth
lat/lon GmbH
Aennchenstr. 19
53177 Bonn
Germany
E-Mail: [email protected]
Prof. Dr. Klaus Greve
Department of Geography
University of Bonn
Meckenheimer Allee 166
53115 Bonn
Germany
E-Mail: [email protected]
---------------------------------------------------------------------------*/
package org.deegree.services.wps.ap.wcts;
import static org.deegree.commons.utils.StringUtils.isSet;
import static org.deegree.gml.GMLVersion.GML_31;
import static org.deegree.gml.GMLVersion.fromMimeType;
import static org.slf4j.LoggerFactory.getLogger;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import org.deegree.commons.tom.ows.CodeType;
import org.deegree.commons.utils.FileUtils;
import org.deegree.commons.xml.XMLParsingException;
import org.deegree.cs.coordinatesystems.ICRS;
import org.deegree.cs.exceptions.OutsideCRSDomainException;
import org.deegree.cs.exceptions.TransformationException;
import org.deegree.cs.exceptions.UnknownCRSException;
import org.deegree.cs.persistence.CRSManager;
import org.deegree.cs.transformations.Transformation;
import org.deegree.gml.GMLVersion;
import org.deegree.gml.XMLTransformer;
import org.deegree.protocol.ows.exception.OWSException;
import org.deegree.protocol.wps.ap.wcts.WCTSConstants;
import org.deegree.services.wps.ExceptionAwareProcesslet;
import org.deegree.services.wps.ExceptionCustomizer;
import org.deegree.services.wps.ProcessletException;
import org.deegree.services.wps.ProcessletExecutionInfo;
import org.deegree.services.wps.ProcessletInputs;
import org.deegree.services.wps.ProcessletOutputs;
import org.deegree.services.wps.input.ComplexInput;
import org.deegree.services.wps.input.LiteralInput;
import org.deegree.services.wps.output.ComplexOutput;
import org.slf4j.Logger;
/**
* The <code>TransformCoordinates</code> class TODO add class documentation here.
*
* @author <a href="mailto:[email protected]">Rutger Bezema</a>
* @author last edited by: $Author$
* @version $Revision$, $Date$
*
*/
public class TransformCoordinates implements ExceptionAwareProcesslet {
private final static GMLVersion defaultGML;
private boolean inspireCompliant;
private static final Logger LOG = getLogger( TransformCoordinates.class );
final static String IN_INPUTDATA = "InputData";
final static String IN_TEST_TRANSFORM = "TestTransformation";
final static String IN_TRANSFORM = "Transformation";
final static String IN_SCRS = "SourceCRS";
final static String IN_TCRS = "TargetCRS";
final static String OUT_DATA = "TransformedData";
static {
URL config = FileUtils.loadDeegreeConfiguration( TransformCoordinates.class, "wcts-configuration.properties" );
GMLVersion configuredVersion = GML_31;
if ( config != null ) {
Properties props = new Properties();
try {
props.load( config.openStream() );
String gmlVersion = props.getProperty( "GML_VERSION" );
if ( gmlVersion != null ) {
try {
configuredVersion = GMLVersion.valueOf( gmlVersion.toUpperCase() );
} catch ( Exception e ) {
LOG.debug( "Your gml version: " + gmlVersion + " could not be mapped, it should be one of: "
+ Arrays.toString( GMLVersion.values() ) );
}
}
} catch ( IOException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Could not load configuration: " + e.getMessage(), e );
} else {
LOG.error( "Could not load configuration: " + e.getMessage() );
}
}
}
defaultGML = configuredVersion;
}
@Override
public void destroy() {
// destroy...
}
@Override
public void init() {
this.inspireCompliant = false;
}
@Override
public void process( ProcessletInputs in, ProcessletOutputs out, ProcessletExecutionInfo info )
throws ProcessletException {
// required by description
ComplexInput xmlInput = (ComplexInput) in.getParameter( IN_INPUTDATA );
String mime = xmlInput.getMimeType();
String inSchema = xmlInput.getSchema();
GMLVersion gmlVersion = fromMimeType( mime, defaultGML );
XMLStreamReader inputData = null;
try {
inputData = xmlInput.getValueAsXMLStream();
// StAXParsingHelper.nextElement( inputData );
} catch ( IOException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception while getting stream from input data: " + e.getMessage(), e );
} else {
LOG.error( "Exception while getting stream from input data: " + e.getMessage() );
}
} catch ( XMLStreamException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception while getting stream from input data: " + e.getMessage(), e );
} else {
LOG.error( "Exception while getting stream from input data: " + e.getMessage() );
}
} catch ( NullPointerException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception (no next element) while getting stream from input data: " + e.getMessage(), e );
} else {
LOG.error( "Exception (no next element) while getting stream from input data: " + e.getMessage() );
}
}
if ( inputData == null ) {
StringBuilder sb = new StringBuilder( "No input data given." );
String execCode = WCTSConstants.ExceptionCodes.NoInputData.name();
throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
}
InputParams evaluatedInput = evaluateInput( in );
ComplexOutput xmlOutput = (ComplexOutput) out.getParameter( OUT_DATA );
String outMime = xmlOutput.getRequestedMimeType();
if ( outMime != null && !outMime.equals( mime ) ) {
throw new ProcessletException(
new OWSException(
"The inspire directive specifies that the output schema equals the input schema, therefore the mimetypes of the incoming data ("
+ mime
+ " and the requested (transformed) outgoing data ("
+ outMime + " must be equal.",
OWSException.INVALID_PARAMETER_VALUE ) );
}
String outSchema = xmlOutput.getRequestedSchema();
if ( outSchema != null && !outSchema.equals( inSchema ) ) {
throw new ProcessletException(
new OWSException( "The inspire directive specifies that the input schema ( "
+ inSchema + ") and the output schema (" + outSchema
+ ") must be equal.", OWSException.INVALID_PARAMETER_VALUE ) );
}
LOG.debug( "Setting XML output (requested=" + xmlOutput.isRequested() + ")" );
XMLStreamWriter writer = null;
try {
writer = xmlOutput.getXMLStreamWriter();
// writer.writeStartDocument();
} catch ( XMLStreamException e ) {
LOG.error( e.getMessage() );
throw new ProcessletException( "Could not create an outputstream." + e.getLocalizedMessage() );
}
// result will not be null
transform( evaluatedInput, inputData, writer, gmlVersion );
try {
// write the end document.
writer.writeEndDocument();
} catch ( XMLStreamException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( e.getLocalizedMessage() );
}
}
private void transform( InputParams evaluatedInput, XMLStreamReader inputData, XMLStreamWriter writer,
GMLVersion gmlVersion )
throws ProcessletException {
try {
List<Transformation> requestedTransformation = null;
if ( evaluatedInput.defaultTransform != null ) {
requestedTransformation = new ArrayList<Transformation>();
requestedTransformation.add( evaluatedInput.defaultTransform );
}
evaluatedInput.transformer.transform( inputData, writer, evaluatedInput.sourceCRS, gmlVersion, true,
requestedTransformation );
} catch ( XMLParsingException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( e.getLocalizedMessage() );
} catch ( IllegalArgumentException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
OWSException.INVALID_PARAMETER_VALUE ) );
} catch ( XMLStreamException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
OWSException.INVALID_PARAMETER_VALUE ) );
} catch ( UnknownCRSException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
OWSException.INVALID_PARAMETER_VALUE ) );
} catch ( TransformationException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
WCTSConstants.ExceptionCodes.NotTransformable.name() ) );
} catch ( OutsideCRSDomainException e ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Exception message: " + e.getMessage(), e );
}
throw new ProcessletException( new OWSException( e.getLocalizedMessage(),
WCTSConstants.ExceptionCodes.InvalidArea.name() ) );
}
}
/**
 * Evaluates the crs / transformation literal inputs and creates the transformer needed to fulfill the
 * request.
 * <p>
 * The presence of {@link #IN_SCRS}, {@link #IN_TCRS} and {@link #IN_TRANSFORM} is encoded as a bit mask
 * (1, 2, 4), so the switch below handles each of the 8 possible combinations. Which combinations are
 * allowed depends on the {@code inspireCompliant} flag.
 *
 * @param in the processlet inputs to read the literal values from
 * @return the instantiated source crs (may be <code>null</code> if the crs is to be taken from the
 *         geometries), the transformer and the optionally requested transformation
 * @throws ProcessletException if the parameter combination is invalid, references unknown crs /
 *             transformations, or no transformer could be created; also thrown (with code
 *             'Transformable') when only a transformation test was requested and it would succeed
 */
private InputParams evaluateInput( ProcessletInputs in )
                        throws ProcessletException {
    XMLTransformer transformer = null;
    String sCrs = getLiteralInputValue( in, IN_SCRS );
    String tCrs = getLiteralInputValue( in, IN_TCRS );
    String transId = getLiteralInputValue( in, IN_TRANSFORM );
    boolean testTransformation = testTransformation( in );
    // bit mask over the given parameters: 1 = source crs, 2 = target crs, 4 = transformation id
    int val = isSet( sCrs ) ? 1 : 0;
    val += isSet( tCrs ) ? 2 : 0;
    val += isSet( transId ) ? 4 : 0;
    // sb != null after the switch signals an error; execCode carries the matching OWS exception code
    StringBuilder sb = null;
    String execCode = OWSException.MISSING_PARAMETER_VALUE;
    ICRS sourceCRS = null;
    ICRS targetCRS = null;
    Transformation requestedTransform = null;
    switch ( val ) {
    case 0:
        // nothing given at all
        sb = new StringBuilder( "None of, " );
        sb.append( IN_SCRS ).append( ", " ).append( IN_TCRS ).append( " and " ).append( IN_TRANSFORM );
        sb.append( " given." );
        break;
    case 1:
        // only sCrs
        sb = new StringBuilder( "Missing " ).append( IN_TCRS );
        break;
    case 2:
        if ( inspireCompliant ) {
            // only tCRS, the crs must be defined in the geometries.
            sb = new StringBuilder( "Missing " ).append( IN_SCRS ).append( " or " ).append( IN_TRANSFORM );
        } else {
            targetCRS = getCRS( tCrs );
            if ( targetCRS == null ) {
                sb = new StringBuilder();
                execCode = OWSException.INVALID_PARAMETER_VALUE;
                sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
            } else {
                transformer = new XMLTransformer( targetCRS );
            }
        }
        break;
    case 3:
        // source and target crs given
        sourceCRS = getCRS( sCrs );
        targetCRS = getCRS( tCrs );
        if ( targetCRS == null || sourceCRS == null ) {
            sb = new StringBuilder();
            execCode = OWSException.INVALID_PARAMETER_VALUE;
            if ( sourceCRS == null ) {
                sb.append( IN_SCRS ).append( " (" ).append( sCrs ).append( ") references an unknown crs." );
            }
            if ( targetCRS == null ) {
                sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
            }
        } else {
            transformer = new XMLTransformer( targetCRS );
        }
        break;
    case 4:
        // only transformation id; rb: inspire conform is not available.
        sb = new StringBuilder( "No, " );
        sb.append( IN_TCRS ).append( " given, it is required." );
        break;
    case 5:
        // source crs + transformation id
        sb = new StringBuilder( "Invalid combination " ).append( IN_SCRS ).append( " and " ).append( IN_TRANSFORM );
        execCode = OWSException.INVALID_PARAMETER_VALUE;
        break;
    case 6:
        // target crs + transformation id
        targetCRS = getCRS( tCrs );
        requestedTransform = CRSManager.getTransformation( null, transId );
        if ( requestedTransform == null || targetCRS == null ) {
            sb = new StringBuilder();
            execCode = OWSException.INVALID_PARAMETER_VALUE;
            if ( targetCRS == null ) {
                sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
            }
            if ( requestedTransform == null ) {
                sb.append( IN_TRANSFORM ).append( " (" ).append( transId ).append( ") references an unknown transformation." );
            }
        } else {
            // fix: the looked-up crs was previously held in a local variable ('tarCRS') while the
            // still-null 'targetCRS' was passed here, creating a transformer without a target crs
            transformer = new XMLTransformer( targetCRS );
        }
        break;
    case 7:
        if ( inspireCompliant ) {
            // rb: the inspire directive says mutual exclusive.
            sb = new StringBuilder( "Mutual exclusion, " );
            sb.append( IN_SCRS ).append( ", " ).append( IN_TCRS ).append( " and " ).append( IN_TRANSFORM );
            sb.append( " were given, allowed are either: " );
            sb.append( IN_SCRS ).append( " and " ).append( IN_TCRS ).append( " or " );
            sb.append( IN_TRANSFORM ).append( " and " ).append( IN_TCRS );
            execCode = WCTSConstants.ExceptionCodes.MutualExclusionException.name();
        } else {
            sourceCRS = getCRS( sCrs );
            targetCRS = getCRS( tCrs );
            requestedTransform = CRSManager.getTransformation( null, transId );
            if ( targetCRS == null || sourceCRS == null || requestedTransform == null ) {
                sb = new StringBuilder();
                execCode = OWSException.INVALID_PARAMETER_VALUE;
                if ( sourceCRS == null ) {
                    sb.append( IN_SCRS ).append( " (" ).append( sCrs ).append( ") references an unknown crs." );
                }
                if ( targetCRS == null ) {
                    sb.append( IN_TCRS ).append( " (" ).append( tCrs ).append( ") references an unknown crs." );
                }
                if ( requestedTransform == null ) {
                    sb.append( IN_TRANSFORM ).append( " (" ).append( transId ).append( ") references an unknown transformation." );
                }
            } else {
                transformer = new XMLTransformer( targetCRS );
            }
        }
        break;
    }
    if ( sb != null ) {
        // an error was detected above; when only a test was requested, report 'NotTransformable' instead
        if ( testTransformation ) {
            execCode = WCTSConstants.ExceptionCodes.NotTransformable.name();
        }
        throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
    }
    if ( inspireCompliant ) {
        // inspire mode additionally requires an explicit source crs
        if ( sourceCRS == null || transformer == null ) {
            if ( testTransformation ) {
                execCode = WCTSConstants.ExceptionCodes.NotTransformable.name();
            } else {
                execCode = WCTSConstants.ExceptionCodes.OperationNotSupported.name();
            }
            sb = new StringBuilder( "Unable to fullfill transformation." );
            if ( sourceCRS == null ) {
                sb.append( IN_SCRS ).append( ", could not be created." );
            }
            if ( transformer == null ) {
                sb.append( "No geometry transformer could be created." );
            }
            throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
        }
    } else {
        if ( transformer == null ) {
            if ( testTransformation ) {
                execCode = WCTSConstants.ExceptionCodes.NotTransformable.name();
            } else {
                execCode = WCTSConstants.ExceptionCodes.OperationNotSupported.name();
            }
            sb = new StringBuilder( "Unable to fullfill transformation." );
            sb.append( "No geometry transformer could be created (e.g. no transformation path available)." );
            throw new ProcessletException( new OWSException( sb.toString(), execCode ) );
        }
    }
    if ( testTransformation ) {
        execCode = WCTSConstants.ExceptionCodes.Transformable.name();
        // rb: wow, the transformation can be applied, but we throw an exception anyway (as the 'spec' says)
        // ;-)
        throw new ProcessletException( new OWSException( "", execCode ) );
    }
    return new InputParams( sourceCRS, transformer, requestedTransform );
}
/**
 * Resolves a coordinate reference system identifier via the {@link CRSManager}.
 *
 * @param crs identifier of the requested crs
 * @return the instantiated crs (created from provider), or <code>null</code> if the identifier is unknown
 */
private ICRS getCRS( String crs ) {
    try {
        return CRSManager.lookup( crs );
    } catch ( UnknownCRSException e ) {
        // an unknown crs is a normal (client-error) situation here, so only log it on debug level
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( e.getMessage(), e );
        }
        return null;
    }
}
/**
 * Evaluates the optional test-transformation flag of the request.
 *
 * @param in the process inputs to inspect
 * @return <code>true</code> iff the {@code IN_TEST_TRANSFORM} parameter is set and parses to <code>true</code>
 */
private boolean testTransformation( ProcessletInputs in ) {
    String flag = getLiteralInputValue( in, IN_TEST_TRANSFORM );
    // unset flag means "no test requested"; otherwise let Boolean.parseBoolean decide
    return isSet( flag ) && Boolean.parseBoolean( flag );
}
/**
 * Convenience accessor for the value of a literal process input.
 *
 * @param in the process inputs
 * @param paramId identifier of the literal input parameter
 * @return the literal value, or <code>null</code> if the parameter is not present
 */
private String getLiteralInputValue( ProcessletInputs in, String paramId ) {
    LiteralInput literal = (LiteralInput) in.getParameter( paramId );
    if ( literal == null ) {
        return null;
    }
    return literal.getValue();
}
/**
 * Supplies the WCTS specific exception customizer for this process.
 *
 * @return an exception customizer bound to the "TransformCoordinates" process code
 */
@Override
public ExceptionCustomizer getExceptionCustomizer() {
    CodeType processCode = new CodeType( "TransformCoordinates" );
    return new org.deegree.services.wps.ap.wcts.ExceptionCustomizer( processCode );
}
private class InputParams {
final ICRS sourceCRS;
final XMLTransformer transformer;
final Transformation defaultTransform;
InputParams( ICRS sourceCRS, XMLTransformer transformer, Transformation defaultTransform ) {
this.sourceCRS = sourceCRS;
this.transformer = transformer;
this.defaultTransform = defaultTransform;
}
}
}
| http://tracker.deegree.org/deegree-services/ticket/307: Minor cleanup of GML-related classes.
| deegree-workspaces/deegree-workspace-wcts/src/main/java/org/deegree/services/wps/ap/wcts/TransformCoordinates.java | http://tracker.deegree.org/deegree-services/ticket/307: Minor cleanup of GML-related classes. |
|
Java | apache-2.0 | 85865eed45222b6be5fb0ef6b6d5837baf01018d | 0 | nimble-platform/identity-service,nimble-platform/identity-service | package eu.nimble.core.infrastructure.identity.system;
import eu.nimble.core.infrastructure.identity.clients.CatalogueServiceClient;
import eu.nimble.core.infrastructure.identity.clients.IndexingClient;
import eu.nimble.core.infrastructure.identity.constants.GlobalConstants;
import eu.nimble.core.infrastructure.identity.entity.UaaUser;
import eu.nimble.core.infrastructure.identity.entity.UserInvitation;
import eu.nimble.core.infrastructure.identity.mail.EmailService;
import eu.nimble.core.infrastructure.identity.repository.*;
import eu.nimble.core.infrastructure.identity.service.AdminService;
import eu.nimble.core.infrastructure.identity.service.IdentityService;
import eu.nimble.core.infrastructure.identity.service.RocketChatService;
import eu.nimble.core.infrastructure.identity.uaa.KeycloakAdmin;
import eu.nimble.core.infrastructure.identity.uaa.OAuthClient;
import eu.nimble.core.infrastructure.identity.utils.DataModelUtils;
import eu.nimble.core.infrastructure.identity.clients.IndexingClientController;
import eu.nimble.core.infrastructure.identity.utils.LogEvent;
import eu.nimble.service.model.solr.Search;
import eu.nimble.service.model.ubl.commonaggregatecomponents.PartyType;
import eu.nimble.service.model.ubl.commonaggregatecomponents.PersonType;
import eu.nimble.service.model.ubl.commonaggregatecomponents.QualifyingPartyType;
import eu.nimble.utility.ExecutionContext;
import eu.nimble.utility.LoggerUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static eu.nimble.core.infrastructure.identity.uaa.OAuthClient.Role.PLATFORM_MANAGER;
/**
 * REST controller exposing administration services for managing identity on the
 * platform: listing/verifying/rejecting/deleting companies and managing users.
 * State-changing endpoints require a bearer token and check the caller's role.
 *
 * Created by Johannes Innerbichler on 12.09.18.
 */
@Controller
@RequestMapping(path = "/admin")
@Api(value = "Admin API", description = "Administration services for managing identity on the platform.")
public class AdminController {

    private static final Logger logger = LoggerFactory.getLogger(AdminController.class);

    // default number of entries per page for the paginated company listings
    private static final String DEFAULT_PAGE_SIZE = "10";

    @Autowired
    private AdminService adminService;
    @Autowired
    private KeycloakAdmin keycloakAdmin;
    @Autowired
    private PersonRepository personRepository;
    @Autowired
    private UaaUserRepository uaaUserRepository;
    @Autowired
    private IdentityService identityService;
    @Autowired
    private UserInvitationRepository userInvitationRepository;
    @Autowired
    private ExecutionContext executionContext;
    @Autowired
    private IndexingClientController indexingController;
    @Autowired
    private CatalogueServiceClient catalogueServiceClient;
    @Autowired
    private PartyRepository partyRepository;
    @Autowired
    private QualifyingPartyRepository qualifyingPartyRepository;
    @Autowired
    private RocketChatService chatService;
    @Autowired
    private EmailService emailService;

    /**
     * Lists companies that still await verification, sorted and paginated.
     *
     * @param pageNumber one-based index of the requested page
     * @param pageSize   number of companies per page
     * @param sortBy     field used for ordering
     * @param orderBy    sort direction
     * @return HTTP 200 with the requested page of unverified companies
     */
    @ApiOperation(value = "Retrieve unverified companies", response = Page.class)
    @RequestMapping(value = "/unverified_companies", produces = {"application/json"}, method = RequestMethod.GET)
    ResponseEntity<Page<PartyType>> getUnverifiedCompanies(@RequestParam(value = "page", required = false, defaultValue = "1") int pageNumber,
                                                           @RequestParam(value = "size", required = false, defaultValue = DEFAULT_PAGE_SIZE) int pageSize,
                                                           @RequestParam(value = "sortBy", required = false, defaultValue = GlobalConstants.PARTY_NAME_STRING) String sortBy,
                                                           @RequestParam(value = "orderBy", required = false, defaultValue = GlobalConstants.ASCENDING_STRING) String orderBy) {
        // ToDo: verify proper access policy (e.g. admin role)
        logger.info("Fetching unverified companies");
        List<PartyType> unverifiedCompanies = adminService.queryCompanies(AdminService.CompanyState.UNVERIFIED);
        if (!orderBy.isEmpty()) {
            adminService.sortCompanies(unverifiedCompanies, sortBy, orderBy);
        }
        // paginate results
        return makePage(pageNumber, pageSize, unverifiedCompanies);
    }

    /**
     * Lists verified companies, sorted and paginated.
     *
     * @param pageNumber one-based index of the requested page
     * @param pageSize   number of companies per page
     * @param sortBy     field used for ordering
     * @param orderBy    sort direction
     * @return HTTP 200 with the requested page of verified companies
     */
    @ApiOperation(value = "Retrieve verified companies", response = Page.class)
    @RequestMapping(value = "/verified_companies", produces = {"application/json"}, method = RequestMethod.GET)
    ResponseEntity<Page<PartyType>> getVerifiedCompanies(@RequestParam(value = "page", required = false, defaultValue = "1") int pageNumber,
                                                         @RequestParam(value = "size", required = false, defaultValue = DEFAULT_PAGE_SIZE) int pageSize,
                                                         @RequestParam(value = "sortBy", required = false, defaultValue = GlobalConstants.PARTY_NAME_STRING) String sortBy,
                                                         @RequestParam(value = "orderBy", required = false, defaultValue = GlobalConstants.ASCENDING_STRING) String orderBy) {
        // ToDo: verify proper access policy (e.g. admin role)
        // fixed copy-paste bug: this endpoint fetches *verified* companies
        logger.info("Fetching verified companies");
        List<PartyType> verifiedCompanies = adminService.queryCompanies(AdminService.CompanyState.VERIFIED);
        if (!orderBy.isEmpty()) {
            adminService.sortCompanies(verifiedCompanies, sortBy, orderBy);
        }
        // paginate results
        return makePage(pageNumber, pageSize, verifiedCompanies);
    }

    /**
     * Marks a company as verified. Restricted to platform managers.
     *
     * @param companyId identifier (hjid) of the company to verify
     * @param bearer    OAuth bearer token of the caller
     * @return HTTP 200 on success, HTTP 401 if the caller lacks the role
     */
    @ApiOperation(value = "Verify company")
    @RequestMapping(value = "/verify_company", method = RequestMethod.POST)
    ResponseEntity<?> verifyCompany(@RequestParam(value = "companyId") long companyId,
                                    @RequestHeader(value = "Authorization") String bearer) throws Exception {
        if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER))
            return new ResponseEntity<>("Only legal platform managers are allowed to verify companies", HttpStatus.UNAUTHORIZED);

        Map<String, String> paramMap = new HashMap<>();
        paramMap.put("activity", LogEvent.VERIFY_COMPANY.getActivity());
        paramMap.put("companyId", String.valueOf(companyId));
        LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Verifying company with id {}", companyId);

        adminService.verifyCompany(companyId, bearer);
        return ResponseEntity.ok().build();
    }

    /**
     * Reverts a previously (soft-)deleted company and re-indexes its party
     * record and catalogues.
     *
     * @param companyId identifier (hjid) of the company to revert
     * @param bearer    OAuth bearer token of the caller
     * @return HTTP 200 on success, HTTP 401 if the caller lacks a permitted role
     */
    @ApiOperation(value = "Revert a deleted company back")
    @RequestMapping(value = "/revert_company/{companyId}", method = RequestMethod.POST)
    ResponseEntity<?> revertCompany(@PathVariable(value = "companyId") long companyId,
                                    @RequestHeader(value = "Authorization") String bearer) throws Exception {
        if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER, OAuthClient.Role.COMPANY_ADMIN,
                OAuthClient.Role.INITIAL_REPRESENTATIVE, OAuthClient.Role.LEGAL_REPRESENTATIVE, OAuthClient.Role.EXTERNAL_REPRESENTATIVE))
            return new ResponseEntity<>("Only platform managers,company_admin, external_representative, "
                    + "initial_representative, legal_representative are allowed to revert companies back", HttpStatus.UNAUTHORIZED);

        Map<String, String> paramMap = new HashMap<>();
        paramMap.put("activity", LogEvent.REVERT_COMPANY.getActivity());
        paramMap.put("companyId", String.valueOf(companyId));
        LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Reverting company with id {}", companyId);

        boolean isCompanyReverted = adminService.revertCompany(companyId, bearer);
        if (isCompanyReverted) {
            // index catalogues
            catalogueServiceClient.indexAllCatalogues(Long.toString(companyId), bearer);
            // index party
            PartyType company = partyRepository.findByHjid(companyId).stream().findFirst().orElseThrow(ControllerUtils.CompanyNotFoundException::new);
            // retrieve qualifying party
            QualifyingPartyType qualifyingParty = qualifyingPartyRepository.findByParty(company).stream().findFirst().get();
            eu.nimble.service.model.solr.party.PartyType newParty = DataModelUtils.toIndexParty(company, qualifyingParty);
            for (IndexingClient indexingClient : indexingController.getClients()) {
                indexingClient.setParty(newParty, bearer);
            }
            return ResponseEntity.ok().build();
        } else {
            return new ResponseEntity<>("Only company_admin, external_representative, "
                    + "initial_representative, legal_representative of company are allowed to revert companies back",
                    HttpStatus.UNAUTHORIZED);
        }
    }

    /**
     * Rejects a company: permanently removes it, its initial user (database,
     * Keycloak and chat if enabled), and its entry in the search indexes.
     *
     * @param companyId identifier (hjid) of the company to reject
     * @param bearer    OAuth bearer token of the caller
     * @return HTTP 200 on success, HTTP 401 if the caller lacks a permitted role
     */
    @ApiOperation(value = "Reject company")
    @RequestMapping(value = "/reject_company/{companyId}", method = RequestMethod.DELETE)
    ResponseEntity<?> rejectCompany(@PathVariable(value = "companyId") long companyId,
                                    @RequestHeader(value = "Authorization") String bearer) throws Exception {
        if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER, OAuthClient.Role.COMPANY_ADMIN,
                OAuthClient.Role.INITIAL_REPRESENTATIVE, OAuthClient.Role.LEGAL_REPRESENTATIVE, OAuthClient.Role.EXTERNAL_REPRESENTATIVE))
            return new ResponseEntity<>("Only platform managers,company_admin, external_representative, "
                    + "initial_representative, legal_representative are allowed to reject companies", HttpStatus.UNAUTHORIZED);

        Map<String, String> paramMap = new HashMap<>();
        paramMap.put("activity", LogEvent.REJECT_COMPANY.getActivity());
        paramMap.put("companyId", String.valueOf(companyId));
        LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Rejecting company with id {}", companyId);

        // retrieve party
        PartyType company = partyRepository.findByHjid(companyId).stream().findFirst().orElseThrow(ControllerUtils.CompanyNotFoundException::new);
        // some companies might not have any associated person
        if (company.getPerson() != null && !company.getPerson().isEmpty()) {
            // retrieve person
            PersonType person = company.getPerson().get(0);
            String emailAddress = person.getContact().getElectronicMail();
            // retrieve uaa user and delete the user from keycloak
            try {
                UaaUser uaaUser = uaaUserRepository.findByUblPerson(person).stream().findFirst().orElseThrow(ControllerUtils.PersonNotFoundException::new);
                keycloakAdmin.deleteUser(uaaUser.getExternalID());
            } catch (ControllerUtils.PersonNotFoundException exception) {
                // best effort: the company can still be rejected even if no platform user exists
                logger.error("No UaaUser is found for person with id: {}", person.getID(), exception);
            }
            // delete the user from UaaUser
            uaaUserRepository.deleteByUblPerson(person);
            // delete person
            personRepository.delete(person);
            // remove the user from RocketChat if enabled
            if (chatService.isChatEnabled()) {
                chatService.deleteUser(emailAddress);
            }
        }
        // delete company permanently
        adminService.deleteCompanyPermanently(companyId);
        // remove party from the solr indexes
        for (IndexingClient indexingClient : indexingController.getClients()) {
            indexingClient.deleteParty(String.valueOf(companyId), bearer);
        }
        return ResponseEntity.ok().build();
    }

    /**
     * Soft-deletes a company and removes its items, catalogues and party entry
     * from the search indexes; notifies the company's legal representatives.
     *
     * @param companyId identifier (hjid) of the company to delete
     * @param userId    identifier of the user requesting the deletion
     * @param bearer    OAuth bearer token of the caller
     * @return HTTP 200 on success, HTTP 401 if the caller is not permitted
     */
    @ApiOperation(value = "Delete company")
    @RequestMapping(value = "/delete_company/{companyId}", method = RequestMethod.DELETE)
    ResponseEntity<?> deleteCompany(@PathVariable(value = "companyId") long companyId,
                                    @RequestParam(value = "userId") long userId,
                                    @RequestHeader(value = "Authorization") String bearer) throws Exception {
        if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER, OAuthClient.Role.COMPANY_ADMIN,
                OAuthClient.Role.INITIAL_REPRESENTATIVE, OAuthClient.Role.LEGAL_REPRESENTATIVE, OAuthClient.Role.EXTERNAL_REPRESENTATIVE))
            return new ResponseEntity<>("Only platform managers,company_admin, external_representative, "
                    + "initial_representative, legal_representative are allowed to delete companies", HttpStatus.UNAUTHORIZED);

        Map<String, String> paramMap = new HashMap<>();
        paramMap.put("activity", LogEvent.DELETE_COMPANY.getActivity());
        paramMap.put("companyId", String.valueOf(companyId));
        LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Deleting company with id {}", companyId);

        boolean isCompanyDeleted = adminService.deleteCompany(companyId, bearer, userId);
        if (isCompanyDeleted) {
            // find items indexed by the manufacturer
            Search search = new Search();
            search.setQuery("manufacturerId:" + companyId);
            eu.nimble.service.model.solr.SearchResult sr = indexingController.getNimbleIndexClient().searchItem(search, bearer);
            List<Object> result = sr.getResult();
            Set<String> catIds = new HashSet<>();
            for (Object ob : result) {
                // the indexing client returns generic JSON maps for items
                @SuppressWarnings("unchecked")
                LinkedHashMap<String, String> lmap = (LinkedHashMap<String, String>) ob;
                String catLineId = lmap.get("uri");
                String catalogueId = lmap.get("catalogueId");
                if (catalogueId != null) {
                    catIds.add(catalogueId);
                }
                // remove items from indexing
                for (IndexingClient indexingClient : indexingController.getClients()) {
                    indexingClient.removeItem(catLineId, bearer);
                }
            }
            // remove the affected catalogues from the indexes
            for (String catId : catIds) {
                for (IndexingClient indexingClient : indexingController.getClients()) {
                    indexingClient.deleteCatalogue(catId, bearer);
                }
            }
            // delete party from the indexes
            for (IndexingClient indexingClient : indexingController.getClients()) {
                indexingClient.deleteParty(String.valueOf(companyId), bearer);
            }
            // send email to Legal Representatives of the company
            PartyType party = partyRepository.findByHjid(companyId).stream().findFirst().orElseThrow(ControllerUtils.CompanyNotFoundException::new);
            // enrich persons with roles
            identityService.enrichWithRoles(party);
            List<PersonType> legalRepresentatives = party.getPerson().stream()
                    .filter(personType -> personType.getRole().contains(OAuthClient.Role.LEGAL_REPRESENTATIVE.toString()))
                    .collect(Collectors.toList());
            emailService.notifyDeletedCompany(legalRepresentatives, party, executionContext.getLanguageId());
            return ResponseEntity.ok().build();
        } else {
            return new ResponseEntity<>("Only platform managers or company members are allowed to delete the company.",
                    HttpStatus.UNAUTHORIZED);
        }
    }

    /**
     * Marks a user as deleted (soft delete); the role check is delegated to
     * {@link AdminService#deletePerson}.
     *
     * @param userId identifier of the person to delete
     * @param bearer OAuth bearer token of the caller
     * @return HTTP 200 on success, HTTP 401 if the caller is not permitted
     */
    @ApiOperation(value = "Delete user (marks user as deleted)")
    @RequestMapping(value = "/delete_user/{userId}", method = RequestMethod.DELETE)
    ResponseEntity<?> deleteUser(@PathVariable(value = "userId") long userId,
                                 @RequestHeader(value = "Authorization") String bearer) throws Exception {
        Map<String, String> paramMap = new HashMap<>();
        paramMap.put("activity", LogEvent.DELETE_USER.getActivity());
        paramMap.put("userId", String.valueOf(userId));
        LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Deleting user with id {}", userId);

        boolean status = adminService.deletePerson(userId, bearer, false);
        if (status) {
            return ResponseEntity.ok().build();
        } else {
            return new ResponseEntity<>("Only platform managers and users by them are allowed to delete users", HttpStatus.UNAUTHORIZED);
        }
    }

    /**
     * Permanently removes a user from the platform: Keycloak account, identity
     * database records, pending invitations and (if enabled) the chat account.
     * Restricted to platform managers.
     *
     * @param username username of the user to remove
     * @param bearer   OAuth bearer token of the caller
     * @return HTTP 200 on success, HTTP 403 if the caller lacks the role
     */
    @ApiOperation(value = "Delete user from the platform permanently")
    @RequestMapping(value = "/user", method = RequestMethod.DELETE)
    ResponseEntity<?> deleteUserPermanently(@RequestParam(value = "username") String username,
                                            @RequestHeader(value = "Authorization") String bearer) throws Exception {
        // validate role
        if (!identityService.hasAnyRole(bearer, PLATFORM_MANAGER))
            return new ResponseEntity<>("Only platform managers are allowed to delete a user from the platform permanently", HttpStatus.FORBIDDEN);

        // delete the user from keycloak
        keycloakAdmin.deleteUserByUsername(username);

        // retrieve uaa user
        UaaUser uaaUser = uaaUserRepository.findOneByUsername(username);
        if (uaaUser != null) {
            // retrieve person
            PersonType person = uaaUser.getUBLPerson();
            // delete the user from UaaUser
            uaaUserRepository.deleteByUblPerson(person);
            // delete person
            personRepository.delete(person);
            // delete user invitations
            List<UserInvitation> userInvitations = userInvitationRepository.findByEmail(person.getContact().getElectronicMail());
            userInvitations.forEach(userInvitation -> userInvitationRepository.delete(userInvitation));
            // remove the user from RocketChat if enabled
            if (chatService.isChatEnabled()) {
                chatService.deleteUser(person.getContact().getElectronicMail());
            }
        }
        return ResponseEntity.ok().build();
    }

    /**
     * Builds a one-based page view over the given company list.
     *
     * @param pageNumber one-based page index
     * @param pageSize   maximum number of entries per page
     * @param companies  full list of companies to paginate
     * @return HTTP 200 with the requested sub-page
     */
    private ResponseEntity<Page<PartyType>> makePage(int pageNumber, int pageSize, List<PartyType> companies) {
        int start = (pageNumber - 1) * pageSize;
        int end = Math.min(start + pageSize, companies.size());
        Page<PartyType> companyPage = new PageImpl<>(companies.subList(start, end), new PageRequest(pageNumber - 1, pageSize), companies.size());
        return ResponseEntity.ok(companyPage);
    }
}
| identity-service/src/main/java/eu/nimble/core/infrastructure/identity/system/AdminController.java | package eu.nimble.core.infrastructure.identity.system;
import eu.nimble.core.infrastructure.identity.clients.CatalogueServiceClient;
import eu.nimble.core.infrastructure.identity.clients.IndexingClient;
import eu.nimble.core.infrastructure.identity.constants.GlobalConstants;
import eu.nimble.core.infrastructure.identity.entity.UaaUser;
import eu.nimble.core.infrastructure.identity.entity.UserInvitation;
import eu.nimble.core.infrastructure.identity.mail.EmailService;
import eu.nimble.core.infrastructure.identity.repository.*;
import eu.nimble.core.infrastructure.identity.service.AdminService;
import eu.nimble.core.infrastructure.identity.service.IdentityService;
import eu.nimble.core.infrastructure.identity.service.RocketChatService;
import eu.nimble.core.infrastructure.identity.uaa.KeycloakAdmin;
import eu.nimble.core.infrastructure.identity.uaa.OAuthClient;
import eu.nimble.core.infrastructure.identity.utils.DataModelUtils;
import eu.nimble.core.infrastructure.identity.clients.IndexingClientController;
import eu.nimble.core.infrastructure.identity.utils.LogEvent;
import eu.nimble.service.model.solr.Search;
import eu.nimble.service.model.ubl.commonaggregatecomponents.PartyType;
import eu.nimble.service.model.ubl.commonaggregatecomponents.PersonType;
import eu.nimble.service.model.ubl.commonaggregatecomponents.QualifyingPartyType;
import eu.nimble.utility.ExecutionContext;
import eu.nimble.utility.LoggerUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static eu.nimble.core.infrastructure.identity.uaa.OAuthClient.Role.PLATFORM_MANAGER;
/**
* Created by Johannes Innerbichler on 12.09.18.
*/
@Controller
@RequestMapping(path = "/admin")
@Api(value = "Admin API", description = "Administration services for managing identity on the platform.")
public class AdminController {
private static final Logger logger = LoggerFactory.getLogger(AdminController.class);
private static final String DEFAULT_PAGE_SIZE = "10";
@Autowired
private AdminService adminService;
@Autowired
private KeycloakAdmin keycloakAdmin;
@Autowired
private PersonRepository personRepository;
@Autowired
private UaaUserRepository uaaUserRepository;
@Autowired
private IdentityService identityService;
@Autowired
private UserInvitationRepository userInvitationRepository;
@Autowired
private ExecutionContext executionContext;
@Autowired
private IndexingClientController indexingController;
@Autowired
private CatalogueServiceClient catalogueServiceClient;
@Autowired
private PartyRepository partyRepository;
@Autowired
private QualifyingPartyRepository qualifyingPartyRepository;
@Autowired
private RocketChatService chatService;
@Autowired
private EmailService emailService;
/**
 * Lists companies that still await verification, sorted and paginated.
 *
 * @param pageNumber one-based index of the requested page
 * @param pageSize   number of companies per page
 * @param sortBy     field used for ordering
 * @param orderBy    sort direction
 * @return HTTP 200 with the requested page of unverified companies
 */
@ApiOperation(value = "Retrieve unverified companies", response = Page.class)
@RequestMapping(value = "/unverified_companies", produces = {"application/json"}, method = RequestMethod.GET)
ResponseEntity<Page<PartyType>> getUnverifiedCompanies(@RequestParam(value = "page", required = false, defaultValue = "1") int pageNumber,
                                                       @RequestParam(value = "size", required = false, defaultValue = DEFAULT_PAGE_SIZE) int pageSize,
                                                       @RequestParam(value = "sortBy", required = false, defaultValue = GlobalConstants.PARTY_NAME_STRING) String sortBy,
                                                       @RequestParam(value = "orderBy", required = false, defaultValue = GlobalConstants.ASCENDING_STRING) String orderBy) {
    // ToDo: verify proper access policy (e.g. admin role)
    logger.info("Fetching unverified companies");
    List<PartyType> companies = adminService.queryCompanies(AdminService.CompanyState.UNVERIFIED);
    if (!orderBy.isEmpty()) {
        adminService.sortCompanies(companies, sortBy, orderBy);
    }
    // paginate results
    return makePage(pageNumber, pageSize, companies);
}
/**
 * Lists verified companies, sorted and paginated.
 *
 * @param pageNumber one-based index of the requested page
 * @param pageSize   number of companies per page
 * @param sortBy     field used for ordering
 * @param orderBy    sort direction
 * @return HTTP 200 with the requested page of verified companies
 */
@ApiOperation(value = "Retrieve verified companies", response = Page.class)
@RequestMapping(value = "/verified_companies", produces = {"application/json"}, method = RequestMethod.GET)
ResponseEntity<Page<PartyType>> getVerifiedCompanies(@RequestParam(value = "page", required = false, defaultValue = "1") int pageNumber,
                                                     @RequestParam(value = "size", required = false, defaultValue = DEFAULT_PAGE_SIZE) int pageSize,
                                                     @RequestParam(value = "sortBy", required = false, defaultValue = GlobalConstants.PARTY_NAME_STRING) String sortBy,
                                                     @RequestParam(value = "orderBy", required = false, defaultValue = GlobalConstants.ASCENDING_STRING) String orderBy) {
    // ToDo: verify proper access policy (e.g. admin role)
    // fixed copy-paste bug: this endpoint fetches *verified* companies
    logger.info("Fetching verified companies");
    List<PartyType> verifiedCompanies = adminService.queryCompanies(AdminService.CompanyState.VERIFIED);
    if (!orderBy.isEmpty()) {
        adminService.sortCompanies(verifiedCompanies, sortBy, orderBy);
    }
    // paginate results
    return makePage(pageNumber, pageSize, verifiedCompanies);
}
/**
 * Marks a company as verified. Restricted to platform managers.
 *
 * @param companyId identifier (hjid) of the company to verify
 * @param bearer    OAuth bearer token of the caller
 * @return HTTP 200 on success, HTTP 401 if the caller lacks the role
 */
@ApiOperation(value = "Verify company")
@RequestMapping(value = "/verify_company", method = RequestMethod.POST)
ResponseEntity<?> verifyCompany(@RequestParam(value = "companyId") long companyId,
                                @RequestHeader(value = "Authorization") String bearer) throws Exception {
    if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER))
        return new ResponseEntity<>("Only legal platform managers are allowed to verify companies", HttpStatus.UNAUTHORIZED);

    Map<String, String> paramMap = new HashMap<>();
    paramMap.put("activity", LogEvent.VERIFY_COMPANY.getActivity());
    paramMap.put("companyId", String.valueOf(companyId));
    LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Verifying company with id {}", companyId);

    adminService.verifyCompany(companyId, bearer);
    return ResponseEntity.ok().build();
}
/**
 * Reverts a previously (soft-)deleted company and re-indexes its party record
 * and catalogues.
 *
 * @param companyId identifier (hjid) of the company to revert
 * @param bearer    OAuth bearer token of the caller
 * @return HTTP 200 on success, HTTP 401 if the caller lacks a permitted role
 */
@ApiOperation(value = "Revert a deleted company back")
@RequestMapping(value = "/revert_company/{companyId}", method = RequestMethod.POST)
ResponseEntity<?> revertCompany(@PathVariable(value = "companyId") long companyId,
                                @RequestHeader(value = "Authorization") String bearer) throws Exception {
    if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER, OAuthClient.Role.COMPANY_ADMIN,
            OAuthClient.Role.INITIAL_REPRESENTATIVE, OAuthClient.Role.LEGAL_REPRESENTATIVE, OAuthClient.Role.EXTERNAL_REPRESENTATIVE))
        return new ResponseEntity<>("Only platform managers,company_admin, external_representative, "
                + "initial_representative, legal_representative are allowed to revert companies back", HttpStatus.UNAUTHORIZED);

    Map<String, String> paramMap = new HashMap<>();
    paramMap.put("activity", LogEvent.REVERT_COMPANY.getActivity());
    paramMap.put("companyId", String.valueOf(companyId));
    LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Reverting company with id {}", companyId);

    boolean isCompanyReverted = adminService.revertCompany(companyId, bearer);
    if (isCompanyReverted) {
        // index catalogues
        catalogueServiceClient.indexAllCatalogues(Long.toString(companyId), bearer);
        // index party
        PartyType company = partyRepository.findByHjid(companyId).stream().findFirst().orElseThrow(ControllerUtils.CompanyNotFoundException::new);
        // retrieve qualifying party
        QualifyingPartyType qualifyingParty = qualifyingPartyRepository.findByParty(company).stream().findFirst().get();
        eu.nimble.service.model.solr.party.PartyType newParty = DataModelUtils.toIndexParty(company, qualifyingParty);
        for (IndexingClient indexingClient : indexingController.getClients()) {
            indexingClient.setParty(newParty, bearer);
        }
        return ResponseEntity.ok().build();
    } else {
        return new ResponseEntity<>("Only company_admin, external_representative, "
                + "initial_representative, legal_representative of company are allowed to revert companies back",
                HttpStatus.UNAUTHORIZED);
    }
}
/**
 * Rejects a company: permanently removes it, its initial user (database,
 * Keycloak and chat if enabled), and its entry in the search indexes.
 *
 * @param companyId identifier (hjid) of the company to reject
 * @param bearer    OAuth bearer token of the caller
 * @return HTTP 200 on success, HTTP 401 if the caller lacks a permitted role
 */
@ApiOperation(value = "Reject company")
@RequestMapping(value = "/reject_company/{companyId}", method = RequestMethod.DELETE)
ResponseEntity<?> rejectCompany(@PathVariable(value = "companyId") long companyId,
                                @RequestHeader(value = "Authorization") String bearer) throws Exception {
    if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER, OAuthClient.Role.COMPANY_ADMIN,
            OAuthClient.Role.INITIAL_REPRESENTATIVE, OAuthClient.Role.LEGAL_REPRESENTATIVE, OAuthClient.Role.EXTERNAL_REPRESENTATIVE))
        return new ResponseEntity<>("Only platform managers,company_admin, external_representative, "
                + "initial_representative, legal_representative are allowed to reject companies", HttpStatus.UNAUTHORIZED);

    Map<String, String> paramMap = new HashMap<>();
    paramMap.put("activity", LogEvent.REJECT_COMPANY.getActivity());
    paramMap.put("companyId", String.valueOf(companyId));
    LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Rejecting company with id {}", companyId);

    // retrieve party
    PartyType company = partyRepository.findByHjid(companyId).stream().findFirst().orElseThrow(ControllerUtils.CompanyNotFoundException::new);
    // some companies might not have any associated person
    if (company.getPerson() != null && !company.getPerson().isEmpty()) {
        // retrieve person
        PersonType person = company.getPerson().get(0);
        String emailAddress = person.getContact().getElectronicMail();
        // retrieve uaa user and delete the user from keycloak
        try {
            UaaUser uaaUser = uaaUserRepository.findByUblPerson(person).stream().findFirst().orElseThrow(ControllerUtils.PersonNotFoundException::new);
            keycloakAdmin.deleteUser(uaaUser.getExternalID());
        } catch (ControllerUtils.PersonNotFoundException exception) {
            // best effort: the company can still be rejected even if no platform user exists
            logger.error("No UaaUser is found for person with id: {}", person.getID(), exception);
        }
        // delete the user from UaaUser
        uaaUserRepository.deleteByUblPerson(person);
        // delete person
        personRepository.delete(person);
        // remove the user from RocketChat if enabled
        if (chatService.isChatEnabled()) {
            chatService.deleteUser(emailAddress);
        }
    }
    // delete company permanently
    adminService.deleteCompanyPermanently(companyId);
    // remove party from the solr indexes
    for (IndexingClient indexingClient : indexingController.getClients()) {
        indexingClient.deleteParty(String.valueOf(companyId), bearer);
    }
    return ResponseEntity.ok().build();
}
/**
 * Soft-deletes a company and removes its items, catalogues and party entry
 * from the search indexes; notifies the company's legal representatives.
 *
 * @param companyId identifier (hjid) of the company to delete
 * @param userId    identifier of the user requesting the deletion
 * @param bearer    OAuth bearer token of the caller
 * @return HTTP 200 on success, HTTP 401 if the caller is not permitted
 */
@ApiOperation(value = "Delete company")
@RequestMapping(value = "/delete_company/{companyId}", method = RequestMethod.DELETE)
ResponseEntity<?> deleteCompany(@PathVariable(value = "companyId") long companyId,
                                @RequestParam(value = "userId") long userId,
                                @RequestHeader(value = "Authorization") String bearer) throws Exception {
    if (!identityService.hasAnyRole(bearer, OAuthClient.Role.PLATFORM_MANAGER, OAuthClient.Role.COMPANY_ADMIN,
            OAuthClient.Role.INITIAL_REPRESENTATIVE, OAuthClient.Role.LEGAL_REPRESENTATIVE, OAuthClient.Role.EXTERNAL_REPRESENTATIVE))
        return new ResponseEntity<>("Only platform managers,company_admin, external_representative, "
                + "initial_representative, legal_representative are allowed to delete companies", HttpStatus.UNAUTHORIZED);

    Map<String, String> paramMap = new HashMap<>();
    paramMap.put("activity", LogEvent.DELETE_COMPANY.getActivity());
    paramMap.put("companyId", String.valueOf(companyId));
    LoggerUtils.logWithMDC(logger, paramMap, LoggerUtils.LogLevel.INFO, "Deleting company with id {}", companyId);

    boolean isCompanyDeleted = adminService.deleteCompany(companyId, bearer, userId);
    if (isCompanyDeleted) {
        // find items indexed by the manufacturer
        Search search = new Search();
        search.setQuery("manufacturerId:" + companyId);
        eu.nimble.service.model.solr.SearchResult sr = indexingController.getNimbleIndexClient().searchItem(search, bearer);
        List<Object> result = sr.getResult();
        Set<String> catIds = new HashSet<>();
        for (Object ob : result) {
            // the indexing client returns generic JSON maps for items
            @SuppressWarnings("unchecked")
            LinkedHashMap<String, String> lmap = (LinkedHashMap<String, String>) ob;
            String catLineId = lmap.get("uri");
            String catalogueId = lmap.get("catalogueId");
            if (catalogueId != null) {
                catIds.add(catalogueId);
            }
            // remove items from indexing
            for (IndexingClient indexingClient : indexingController.getClients()) {
                indexingClient.removeItem(catLineId, bearer);
            }
        }
        // remove the affected catalogues from the indexes
        for (String catId : catIds) {
            for (IndexingClient indexingClient : indexingController.getClients()) {
                indexingClient.deleteCatalogue(catId, bearer);
            }
        }
        // delete party from the indexes
        for (IndexingClient indexingClient : indexingController.getClients()) {
            indexingClient.deleteParty(String.valueOf(companyId), bearer);
        }
        // send email to Legal Representatives of the company
        PartyType party = partyRepository.findByHjid(companyId).stream().findFirst().orElseThrow(ControllerUtils.CompanyNotFoundException::new);
        // enrich persons with roles
        identityService.enrichWithRoles(party);
        List<PersonType> legalRepresentatives = party.getPerson().stream()
                .filter(personType -> personType.getRole().contains(OAuthClient.Role.LEGAL_REPRESENTATIVE.toString()))
                .collect(Collectors.toList());
        emailService.notifyDeletedCompany(legalRepresentatives, party, executionContext.getLanguageId());
        return ResponseEntity.ok().build();
    } else {
        return new ResponseEntity<>("Only company_admin, external_representative, "
                + "initial_representative, legal_representative of company are allowed to delete companies",
                HttpStatus.UNAUTHORIZED);
    }
}
@ApiOperation(value = "Delete user (marks user as deleted)")
@RequestMapping(value = "/delete_user/{userId}", method = RequestMethod.DELETE)
ResponseEntity<?> deleteUser(@PathVariable(value = "userId") long userId,
        @RequestHeader(value = "Authorization") String bearer) throws Exception {
    // Record the deletion attempt with structured MDC parameters for auditing.
    Map<String, String> logParams = new HashMap<>();
    logParams.put("activity", LogEvent.DELETE_USER.getActivity());
    logParams.put("userId", String.valueOf(userId));
    LoggerUtils.logWithMDC(logger, logParams, LoggerUtils.LogLevel.INFO, "Deleting user with id {}", userId);

    // Soft delete: adminService marks the person as deleted and reports whether
    // the caller was authorized to do so.
    if (adminService.deletePerson(userId, bearer, false)) {
        return ResponseEntity.ok().build();
    }
    return new ResponseEntity<>("Only platform managers and users by them are allowed to delete users", HttpStatus.UNAUTHORIZED);
}
@ApiOperation(value = "Delete user from the platform permanently")
@RequestMapping(value = "/user", method = RequestMethod.DELETE)
ResponseEntity<?> deleteUserPermanently(@RequestParam(value = "username") String username,
        @RequestHeader(value = "Authorization") String bearer) throws Exception {
    // Hard delete is restricted to platform managers.
    if (!identityService.hasAnyRole(bearer, PLATFORM_MANAGER)) {
        return new ResponseEntity<>("Only platform managers are allowed to delete a user from the platform permanently", HttpStatus.FORBIDDEN);
    }

    // Remove the account from Keycloak first.
    keycloakAdmin.deleteUserByUsername(username);

    // Then clean up the local persistence: UAA record, person entity,
    // pending invitations and (optionally) the chat account.
    UaaUser uaaAccount = uaaUserRepository.findOneByUsername(username);
    if (uaaAccount != null) {
        PersonType ublPerson = uaaAccount.getUBLPerson();
        // Drop the UAA record before the underlying person entity it references.
        uaaUserRepository.deleteByUblPerson(ublPerson);
        personRepository.delete(ublPerson);
        // Purge any invitations addressed to this user's email.
        String email = ublPerson.getContact().getElectronicMail();
        for (UserInvitation invitation : userInvitationRepository.findByEmail(email)) {
            userInvitationRepository.delete(invitation);
        }
        // Remove the matching RocketChat account when chat integration is enabled.
        if (chatService.isChatEnabled()) {
            chatService.deleteUser(email);
        }
    }
    return ResponseEntity.ok().build();
}
/**
 * Builds one page of the given company list.
 *
 * Page numbers are 1-based in the API but 0-based in Spring's PageRequest.
 * The sublist bounds are clamped so that an out-of-range or non-positive
 * pageNumber yields an empty page instead of letting List.subList throw
 * an IndexOutOfBoundsException (the original computed raw offsets).
 *
 * @param pageNumber          1-based page index requested by the client
 * @param pageSize            number of entries per page
 * @param unverifiedCompanies full result list to be paged
 * @return HTTP 200 with the requested page (possibly empty)
 */
private ResponseEntity<Page<PartyType>> makePage(@RequestParam(value = "page", required = false, defaultValue = "1") int pageNumber, @RequestParam(value = "size", required = false, defaultValue = DEFAULT_PAGE_SIZE) int pageSize, List<PartyType> unverifiedCompanies) {
    int start = Math.min(Math.max(0, (pageNumber - 1) * pageSize), unverifiedCompanies.size());
    int end = Math.min(start + pageSize, unverifiedCompanies.size());
    Page<PartyType> companyPage = new PageImpl<>(unverifiedCompanies.subList(start, end), new PageRequest(pageNumber - 1, pageSize), unverifiedCompanies.size());
    return ResponseEntity.ok(companyPage);
}
}
| fix the exception message for deleteCompany endpoint
| identity-service/src/main/java/eu/nimble/core/infrastructure/identity/system/AdminController.java | fix the exception message for deleteCompany endpoint |
|
Java | apache-2.0 | b8a78c08b1bc0b78bdc297abda5168ad5ab3412b | 0 | bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr | /*
* #%L
* %%
* Copyright (C) 2020 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.capabilities;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.ArgumentMatchers;
import org.mockito.Captor;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.joynr.capabilities.LocalCapabilitiesDirectoryImpl.GcdTaskSequencer;
import io.joynr.dispatching.Dispatcher;
import io.joynr.exceptions.JoynrCommunicationException;
import io.joynr.exceptions.JoynrException;
import io.joynr.exceptions.JoynrMessageNotSentException;
import io.joynr.exceptions.JoynrRuntimeException;
import io.joynr.exceptions.JoynrTimeoutException;
import io.joynr.messaging.MessagingQos;
import io.joynr.messaging.routing.RoutingTable;
import io.joynr.messaging.routing.TransportReadyListener;
import io.joynr.provider.AbstractDeferred;
import io.joynr.provider.DeferredVoid;
import io.joynr.provider.Promise;
import io.joynr.provider.PromiseListener;
import io.joynr.proxy.Callback;
import io.joynr.proxy.CallbackWithModeledError;
import io.joynr.proxy.Future;
import io.joynr.proxy.ProxyBuilderFactory;
import io.joynr.runtime.GlobalAddressProvider;
import io.joynr.runtime.JoynrRuntime;
import io.joynr.runtime.ShutdownNotifier;
import io.joynr.util.ObjectMapper;
import joynr.exceptions.ApplicationException;
import joynr.exceptions.ProviderRuntimeException;
import joynr.infrastructure.GlobalCapabilitiesDirectory;
import joynr.system.DiscoveryProvider.Add1Deferred;
import joynr.system.DiscoveryProvider.AddToAllDeferred;
import joynr.system.DiscoveryProvider.Lookup1Deferred;
import joynr.system.DiscoveryProvider.Lookup2Deferred;
import joynr.system.DiscoveryProvider.Lookup3Deferred;
import joynr.system.DiscoveryProvider.Lookup4Deferred;
import joynr.system.RoutingTypes.Address;
import joynr.system.RoutingTypes.MqttAddress;
import joynr.types.CustomParameter;
import joynr.types.DiscoveryEntry;
import joynr.types.DiscoveryEntryWithMetaInfo;
import joynr.types.DiscoveryError;
import joynr.types.DiscoveryQos;
import joynr.types.DiscoveryScope;
import joynr.types.GlobalDiscoveryEntry;
import joynr.types.ProviderQos;
import joynr.types.ProviderScope;
import joynr.types.Version;
@RunWith(MockitoJUnitRunner.class)
public class LocalCapabilitiesDirectoryTest {
private static final Logger logger = LoggerFactory.getLogger(LocalCapabilitiesDirectoryTest.class);
private static final int TEST_TIMEOUT = 10000;
private static final int DEFAULT_WAIT_TIME_MS = 5000; // value should be shorter than TEST_TIMEOUT
private static final String INTERFACE_NAME = "interfaceName";
private static final String TEST_URL = "mqtt://testUrl:42";
private static final long ONE_DAY_IN_MS = 1 * 24 * 60 * 60 * 1000;
private static final long freshnessUpdateIntervalMs = 300;
private static final long DEFAULT_EXPIRY_TIME_MS = 3628800000l;
private static final long RE_ADD_INTERVAL_DAYS = 7l;
private static final long defaultTtlAddAndRemove = MessagingQos.DEFAULT_TTL;
private LocalCapabilitiesDirectory localCapabilitiesDirectory;
private String[] knownGbids = { "testDEFAULTgbid", "testgbid2", "testGbid" };
private Long expiryDateMs = System.currentTimeMillis() + ONE_DAY_IN_MS;
private String publicKeyId = "publicKeyId";
private MqttAddress globalAddress1;
private String globalAddress1Serialized;
private MqttAddress globalAddress2;
private String globalAddress2Serialized;
private MqttAddress globalAddressWithoutGbid;
private String globalAddressWithoutGbidSerialized;
private DiscoveryEntry discoveryEntry;
private DiscoveryEntry expectedDiscoveryEntry;
private GlobalDiscoveryEntry globalDiscoveryEntry;
private GlobalDiscoveryEntry expectedGlobalDiscoveryEntry;
private GlobalDiscoveryEntry provisionedGlobalDiscoveryEntry;
@Mock
JoynrRuntime runtime;
@Mock
private GlobalCapabilitiesDirectoryClient globalCapabilitiesDirectoryClient;
@Mock
private ExpiredDiscoveryEntryCacheCleaner expiredDiscoveryEntryCacheCleaner;
@Mock
private RoutingTable routingTable;
@Mock
private Dispatcher dispatcher;
@Mock
private ProxyBuilderFactory proxyBuilderFactoryMock;
@Mock
private DiscoveryEntryStore<DiscoveryEntry> localDiscoveryEntryStoreMock;
@Mock
private DiscoveryEntryStore<GlobalDiscoveryEntry> globalDiscoveryEntryCacheMock;
@Mock
private GlobalAddressProvider globalAddressProvider;
@Mock
private CapabilitiesProvisioning capabilitiesProvisioning;
@Mock
private ScheduledExecutorService capabilitiesFreshnessUpdateExecutor;
@Mock
private ShutdownNotifier shutdownNotifier;
@Captor
private ArgumentCaptor<Collection<DiscoveryEntryWithMetaInfo>> capabilitiesCaptor;
@Captor
private ArgumentCaptor<Runnable> runnableCaptor;
@Captor
private ArgumentCaptor<GcdTaskSequencer> addRemoveQueueRunnableCaptor;
@Captor
ArgumentCaptor<CallbackWithModeledError<Void, DiscoveryError>> callbackCaptor;
private GcdTaskSequencer gcdTaskSequencerSpy;
private GcdTaskSequencer gcdTaskSequencer;
private Thread addRemoveWorker;
/**
 * Mockito matcher accepting a DiscoveryEntry equal to the expected one except
 * for a refreshed (slightly newer) lastSeenDateMs.
 */
private static class DiscoveryEntryWithUpdatedLastSeenDateMsMatcher implements ArgumentMatcher<DiscoveryEntry> {

    private DiscoveryEntry expected;

    private DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(DiscoveryEntry expected) {
        this.expected = expected;
    }

    @Override
    public boolean matches(DiscoveryEntry argument) {
        assertNotNull(argument);
        DiscoveryEntry actual = (DiscoveryEntry) argument;
        return discoveryEntriesMatchWithUpdatedLastSeenDate(expected, actual);
    }

    @Override
    public String toString() {
        // Shown by Mockito in verification failure messages.
        return "expected: " + expected;
    }
}
/**
 * Mockito matcher for GlobalDiscoveryEntry: equality modulo a refreshed
 * lastSeenDateMs; the serialized address must match exactly.
 */
private static class GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher
        implements ArgumentMatcher<GlobalDiscoveryEntry> {

    private GlobalDiscoveryEntry expected;

    private GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(GlobalDiscoveryEntry expected) {
        this.expected = expected;
    }

    @Override
    public boolean matches(GlobalDiscoveryEntry argument) {
        assertNotNull(argument);
        return globalDiscoveryEntriesMatchWithUpdatedLastSeenDate(expected, (GlobalDiscoveryEntry) argument);
    }
}
/**
 * Compares two DiscoveryEntries for equality while allowing actual.lastSeenDateMs
 * to be up to 1000 ms newer than expected.lastSeenDateMs (entries get a refreshed
 * lastSeenDate during (re-)registration).
 *
 * Fix: the original compared String/boxed getters (domain, interfaceName,
 * participantId, publicKeyId, expiryDateMs) with ==, i.e. reference equality,
 * which only held when both entries shared the very same instances. Use
 * null-safe value equality instead.
 */
private static boolean discoveryEntriesMatchWithUpdatedLastSeenDate(DiscoveryEntry expected,
                                                                    DiscoveryEntry actual) {
    return nullSafeEquals(expected.getDomain(), actual.getDomain())
            && nullSafeEquals(expected.getExpiryDateMs(), actual.getExpiryDateMs())
            && nullSafeEquals(expected.getInterfaceName(), actual.getInterfaceName())
            && nullSafeEquals(expected.getParticipantId(), actual.getParticipantId())
            && expected.getProviderVersion().equals(actual.getProviderVersion())
            && nullSafeEquals(expected.getPublicKeyId(), actual.getPublicKeyId())
            && expected.getQos().equals(actual.getQos())
            && expected.getLastSeenDateMs() <= actual.getLastSeenDateMs()
            && (expected.getLastSeenDateMs() + 1000) >= actual.getLastSeenDateMs();
}

// Null-safe value equality; local helper so no extra java.util.Objects import is needed.
private static boolean nullSafeEquals(Object a, Object b) {
    return (a == b) || (a != null && a.equals(b));
}
/**
 * Like discoveryEntriesMatchWithUpdatedLastSeenDate, additionally requiring the
 * isLocal meta-info flag to match.
 */
private static boolean discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(DiscoveryEntryWithMetaInfo expected,
                                                                                DiscoveryEntryWithMetaInfo actual) {
    boolean baseMatches = discoveryEntriesMatchWithUpdatedLastSeenDate(expected, actual);
    return baseMatches && expected.getIsLocal() == actual.getIsLocal();
}
/**
 * Like discoveryEntriesMatchWithUpdatedLastSeenDate, additionally requiring the
 * serialized global address to be equal.
 */
private static boolean globalDiscoveryEntriesMatchWithUpdatedLastSeenDate(GlobalDiscoveryEntry expected,
                                                                          GlobalDiscoveryEntry actual) {
    boolean baseFieldsMatch = discoveryEntriesMatchWithUpdatedLastSeenDate(expected, actual);
    return baseFieldsMatch && expected.getAddress().equals(actual.getAddress());
}
/**
 * Looks up a declared field of the given class, failing the test if the
 * lookup is not possible.
 *
 * Fix: the original caught Exception broadly and called fail(e.getMessage()),
 * which loses the class/field context and may pass a null message. Catch the
 * specific reflection exceptions and build a descriptive failure message.
 *
 * @param privateClass class to inspect
 * @param fieldName    name of the declared (possibly private) field
 * @return the Field; never returns normally on failure (fail() throws)
 */
private Field getPrivateField(Class<?> privateClass, String fieldName) {
    try {
        return privateClass.getDeclaredField(fieldName);
    } catch (NoSuchFieldException | SecurityException e) {
        fail("Could not access field " + fieldName + " of " + privateClass.getName() + ": " + e);
        return null; // unreachable: fail() throws AssertionError
    }
}
/**
 * Sets a (possibly private) field on the given object via reflection.
 * Fails the test if the field does not exist.
 */
private <T> void setFieldValue(Object object, String fieldName, T value) throws IllegalArgumentException,
        IllegalAccessException {
    Field targetField = getPrivateField(object.getClass(), fieldName);
    assertNotNull(targetField);
    // Bypass access checks so private fields can be replaced in tests.
    targetField.setAccessible(true);
    targetField.set(object, value);
}
@Before
public void setUp() throws Exception {
    // --- serialized MQTT addresses for two known GBIDs and a GBID-less broker ---
    ObjectMapper objectMapper = new ObjectMapper();
    globalAddress1 = new MqttAddress(knownGbids[0], "testTopic");
    globalAddress1Serialized = objectMapper.writeValueAsString(globalAddress1);
    globalAddress2 = new MqttAddress(knownGbids[1], "testTopic");
    globalAddress2Serialized = objectMapper.writeValueAsString(globalAddress2);
    globalAddressWithoutGbid = new MqttAddress("brokerUri", "testTopic");
    globalAddressWithoutGbidSerialized = objectMapper.writeValueAsString(globalAddressWithoutGbid);
    // Inject the test ObjectMapper into the static CapabilityUtils helper via reflection.
    Field objectMapperField = CapabilityUtils.class.getDeclaredField("objectMapper");
    objectMapperField.setAccessible(true);
    objectMapperField.set(CapabilityUtils.class, objectMapper);
    // Default stub: every GCD add succeeds immediately (individual tests override this).
    doAnswer(createAnswerWithSuccess()).when(globalCapabilitiesDirectoryClient)
                                       .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                            any(GlobalDiscoveryEntry.class),
                                            anyLong(),
                                            ArgumentMatchers.<String[]> any());
    // --- provisioned discovery entries (GCD itself + one custom provisioned provider) ---
    String discoveryDirectoriesDomain = "io.joynr";
    String capabilitiesDirectoryParticipantId = "capDir_participantId";
    String capabiltitiesDirectoryTopic = "dirTopic";
    GlobalDiscoveryEntry globalCapabilitiesDirectoryDiscoveryEntry = CapabilityUtils.newGlobalDiscoveryEntry(new Version(0,
                                                                                                                         1),
                                                                                                             discoveryDirectoriesDomain,
                                                                                                             GlobalCapabilitiesDirectory.INTERFACE_NAME,
                                                                                                             capabilitiesDirectoryParticipantId,
                                                                                                             new ProviderQos(),
                                                                                                             System.currentTimeMillis(),
                                                                                                             expiryDateMs,
                                                                                                             "provisionedPublicKey",
                                                                                                             new MqttAddress(TEST_URL,
                                                                                                                             capabiltitiesDirectoryTopic));
    provisionedGlobalDiscoveryEntry = CapabilityUtils.newGlobalDiscoveryEntry(new Version(0, 1),
                                                                              "provisioneddomain",
                                                                              "provisionedInterface",
                                                                              "provisionedParticipantId",
                                                                              new ProviderQos(),
                                                                              System.currentTimeMillis(),
                                                                              expiryDateMs,
                                                                              "provisionedPublicKey",
                                                                              new MqttAddress("provisionedbrokeruri",
                                                                                              "provisionedtopic"));
    when(capabilitiesProvisioning.getDiscoveryEntries()).thenReturn(new HashSet<GlobalDiscoveryEntry>(Arrays.asList(globalCapabilitiesDirectoryDiscoveryEntry,
                                                                                                                    provisionedGlobalDiscoveryEntry)));
    // --- system under test ---
    localCapabilitiesDirectory = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
                                                                    globalAddressProvider,
                                                                    localDiscoveryEntryStoreMock,
                                                                    globalDiscoveryEntryCacheMock,
                                                                    routingTable,
                                                                    globalCapabilitiesDirectoryClient,
                                                                    expiredDiscoveryEntryCacheCleaner,
                                                                    freshnessUpdateIntervalMs,
                                                                    capabilitiesFreshnessUpdateExecutor,
                                                                    shutdownNotifier,
                                                                    knownGbids,
                                                                    DEFAULT_EXPIRY_TIME_MS);
    // Capture the GcdTaskSequencer the constructor scheduled on the executor mock
    // and run it on a dedicated worker thread (stopped again in tearDown()).
    verify(capabilitiesFreshnessUpdateExecutor).schedule(addRemoveQueueRunnableCaptor.capture(),
                                                         anyLong(),
                                                         eq(TimeUnit.MILLISECONDS));
    gcdTaskSequencer = addRemoveQueueRunnableCaptor.getValue();
    gcdTaskSequencerSpy = Mockito.spy(gcdTaskSequencer);
    addRemoveWorker = new Thread(gcdTaskSequencer);
    addRemoveWorker.start();
    // --- default discovery entry used by most tests, plus its global counterpart ---
    ProviderQos providerQos = new ProviderQos();
    CustomParameter[] parameterList = { new CustomParameter("key1", "value1"),
            new CustomParameter("key2", "value2") };
    providerQos.setCustomParameters(parameterList);
    String participantId = "testParticipantId";
    String domain = "domain";
    discoveryEntry = new DiscoveryEntry(new Version(47, 11),
                                        domain,
                                        INTERFACE_NAME,
                                        participantId,
                                        providerQos,
                                        System.currentTimeMillis(),
                                        expiryDateMs,
                                        publicKeyId);
    expectedDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
    globalDiscoveryEntry = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry, globalAddress1);
    expectedGlobalDiscoveryEntry = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    // Default stubs: primary global address available, both stores empty.
    when(globalAddressProvider.get()).thenReturn(globalAddress1);
    when(localDiscoveryEntryStoreMock.lookup(anyString(), anyLong())).thenReturn(Optional.empty());
    when(globalDiscoveryEntryCacheMock.lookup(anyString(), anyLong())).thenReturn(Optional.empty());
}
@After
public void tearDown() throws Exception {
    // Stop the GcdTaskSequencer loop and wait for its worker thread to exit,
    // so no background task from this test leaks into the next one.
    gcdTaskSequencer.stop();
    addRemoveWorker.join();
}
@Test(timeout = TEST_TIMEOUT)
// NOTE(review): "Initializen" in the method name is a typo ("Initialized");
// kept as-is because renaming would change the externally visible test id.
public void testExpiredDiscoveryEntryCacheCleanerIsInitializenCorrectly() {
    // The constructor (invoked in setUp) must have scheduled expiry cleanup
    // for both the global cache and the local store.
    verify(expiredDiscoveryEntryCacheCleaner).scheduleCleanUpForCaches(Mockito.<ExpiredDiscoveryEntryCacheCleaner.CleanupAction> any(),
                                                                       eq(globalDiscoveryEntryCacheMock),
                                                                       eq(localDiscoveryEntryStoreMock));
}
/**
 * Shared check for globally scoped add operations: the GCD client must be
 * invoked first, and the entry may only be written to the local store after
 * the global add succeeded (never to the global cache).
 *
 * @param expectedGbids GBIDs the GCD add is expected to be called with
 * @param addFunction   the concrete add/addToAll invocation under test
 */
private void checkAddGlobal_invokesGcdThenLocalStoreWhenGlobalAddSucceeded(String[] expectedGbids,
                                                                           Function<Void, Promise<? extends AbstractDeferred>> addFunction) throws InterruptedException {
    ArgumentCaptor<GlobalDiscoveryEntry> argumentCaptor = ArgumentCaptor.forClass(GlobalDiscoveryEntry.class);
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
    // semaphore1 signals that the GCD add was invoked; semaphore2 holds back its
    // success callback until the local store has been verified to be untouched.
    Semaphore successCallbackSemaphore1 = new Semaphore(0);
    Semaphore successCallbackSemaphore2 = new Semaphore(0);
    doAnswer(createAnswerWithSuccess(successCallbackSemaphore1,
                                     successCallbackSemaphore2)).when(globalCapabilitiesDirectoryClient)
                                                                .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                     any(GlobalDiscoveryEntry.class),
                                                                     anyLong(),
                                                                     ArgumentMatchers.<String[]> any());
    Promise<? extends AbstractDeferred> promise = addFunction.apply(null);
    // GCD add has been called, but nothing must have reached the local store yet.
    assertTrue(successCallbackSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    // Let the global add succeed now.
    successCallbackSemaphore2.release();
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argumentCaptor.capture(),
                                                  remainingTtlCapture.capture(),
                                                  eq(expectedGbids));
    GlobalDiscoveryEntry capturedGlobalDiscoveryEntry = argumentCaptor.getValue();
    assertNotNull(capturedGlobalDiscoveryEntry);
    // The TTL passed to the GCD must still be within (DEFAULT_TTL/2, DEFAULT_TTL].
    checkRemainingTtl(remainingTtlCapture);
    assertTrue(globalDiscoveryEntriesMatchWithUpdatedLastSeenDate(expectedGlobalDiscoveryEntry,
                                                                  capturedGlobalDiscoveryEntry));
    // After global success the entry is stored locally, but not in the global cache.
    verify(localDiscoveryEntryStoreMock).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
}
/**
 * Asserts that the captured remaining TTL lies in (DEFAULT_TTL/2, DEFAULT_TTL]:
 * some time may already have elapsed since the request was issued, but not
 * more than half of the default TTL.
 */
private void checkRemainingTtl(ArgumentCaptor<Long> remainingTtlCaptor) {
    final long capturedTtl = remainingTtlCaptor.getValue().longValue();
    assertTrue(capturedTtl <= MessagingQos.DEFAULT_TTL);
    assertTrue(capturedTtl > (MessagingQos.DEFAULT_TTL / 2.0));
}
@Test(timeout = TEST_TIMEOUT)
public void add_global_invokesGcdAndStore() throws InterruptedException {
    // add() without explicit GBIDs must register the entry in all known backends.
    final boolean awaitGlobalRegistration = true;
    Function<Void, Promise<? extends AbstractDeferred>> addOperation = unused -> localCapabilitiesDirectory.add(discoveryEntry,
                                                                                                               awaitGlobalRegistration);
    checkAddGlobal_invokesGcdThenLocalStoreWhenGlobalAddSucceeded(knownGbids, addOperation);
}
@Test(timeout = TEST_TIMEOUT)
public void addWithGbids_global_singleNonDefaultGbid_invokesGcdAndStore() throws InterruptedException {
    // Registration restricted to a single, non-default backend.
    final String[] gbids = new String[]{ knownGbids[1] };
    final boolean awaitGlobalRegistration = true;
    Function<Void, Promise<? extends AbstractDeferred>> addOperation = unused -> localCapabilitiesDirectory.add(discoveryEntry,
                                                                                                               awaitGlobalRegistration,
                                                                                                               gbids);
    checkAddGlobal_invokesGcdThenLocalStoreWhenGlobalAddSucceeded(gbids.clone(), addOperation);
}
@Test(timeout = TEST_TIMEOUT)
public void addWithGbids_global_multipleGbids_invokesGcdAndStore() throws InterruptedException {
    // The GBID order passed by the caller must be preserved; it intentionally
    // differs from the order of knownGbids.
    final String[] gbids = new String[]{ knownGbids[1], knownGbids[0] };
    final boolean awaitGlobalRegistration = true;
    Function<Void, Promise<? extends AbstractDeferred>> addOperation = unused -> localCapabilitiesDirectory.add(discoveryEntry,
                                                                                                               awaitGlobalRegistration,
                                                                                                               gbids);
    checkAddGlobal_invokesGcdThenLocalStoreWhenGlobalAddSucceeded(gbids.clone(), addOperation);
}
@Test(timeout = TEST_TIMEOUT)
public void addWithGbids_global_emptyGbidArray_addsToKnownBackends() throws InterruptedException {
    // An empty GBID array means "register in all known backends".
    final boolean awaitGlobalRegistration = true;
    final String[] gbids = new String[0];
    Function<Void, Promise<? extends AbstractDeferred>> addOperation = unused -> localCapabilitiesDirectory.add(discoveryEntry,
                                                                                                               awaitGlobalRegistration,
                                                                                                               gbids);
    checkAddGlobal_invokesGcdThenLocalStoreWhenGlobalAddSucceeded(knownGbids, addOperation);
}
@Test(timeout = TEST_TIMEOUT)
public void addToAll_global_invokesGcdAndStore() throws InterruptedException {
    // addToAll() always targets every known backend.
    final boolean awaitGlobalRegistration = true;
    Function<Void, Promise<? extends AbstractDeferred>> addOperation = unused -> localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                                                                    awaitGlobalRegistration);
    checkAddGlobal_invokesGcdThenLocalStoreWhenGlobalAddSucceeded(knownGbids.clone(), addOperation);
}
/**
 * The GcdTaskSequencer worker must survive a taskFinished() call that arrives
 * while the add task is still being processed: the sequencer stays alive and
 * continues to process subsequent tasks, and the manually created callbacks
 * are never invoked.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerDoesNotCrashOnExceptionAfterAddTaskFinished() throws InterruptedException,
        IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    CountDownLatch cdl1 = new CountDownLatch(1);
    CountDownLatch cdl2 = new CountDownLatch(1);
    @SuppressWarnings("serial")
    // GDE subclass whose getProviderVersion() blocks the sequencer thread until
    // cdl2 is released, opening a window to call taskFinished() mid-processing.
    class TestGde extends GlobalDiscoveryEntry {
        TestGde(GlobalDiscoveryEntry gde) {
            super(gde);
        }

        @Override
        public Version getProviderVersion() {
            cdl1.countDown();
            try {
                // block GcdTaskSequencer until taskFinished has been called
                cdl2.await();
            } catch (InterruptedException e) {
                // ignore
            }
            return super.getProviderVersion();
        }
    }
    AtomicBoolean cbCalled = new AtomicBoolean();
    TestGde gde = new TestGde(globalDiscoveryEntry);
    // Callbacks only record that they were (unexpectedly) invoked; taskFinished
    // is triggered manually by the test instead.
    GcdTask.CallbackCreator callbackCreator = new GcdTask.CallbackCreator() {
        @Override
        public CallbackWithModeledError<Void, DiscoveryError> createCallback() {
            return new CallbackWithModeledError<Void, DiscoveryError>() {
                @Override
                public void onFailure(DiscoveryError errorEnum) {
                    // taskFinished is called manually
                    logger.error("onFailure callback called, DiscoveryError {}", errorEnum);
                    cbCalled.set(true);
                }

                @Override
                public void onFailure(JoynrRuntimeException runtimeException) {
                    // taskFinished is called manually
                    logger.error("onFailure callback called:", runtimeException);
                    cbCalled.set(true);
                }

                @Override
                public void onSuccess(Void result) {
                    // taskFinished is called manually
                    logger.error("onSuccess callback called");
                    cbCalled.set(true);
                }
            };
        }
    };
    GcdTask task = GcdTask.createAddTask(callbackCreator, gde, expiryDateMs, knownGbids, true);
    gcdTaskSequencer.addTask(task);
    // Wait until the sequencer is blocked inside TestGde.getProviderVersion().
    // NOTE(review): DEFAULT_WAIT_TIME_MS * 100 is far more generous than other
    // tests; presumably intentional to rule out scheduling flakiness — confirm.
    assertTrue(cdl1.await(DEFAULT_WAIT_TIME_MS * 100, TimeUnit.MILLISECONDS));
    // call taskFinished while task is processed
    gcdTaskSequencer.taskFinished();
    cdl2.countDown();
    verify(globalCapabilitiesDirectoryClient,
           timeout(1000).times(1)).add(any(),
                                       argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                                       anyLong(),
                                       eq(expectedGbids));
    // check that GcdTaskSequencer is still alive
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    verify(globalCapabilitiesDirectoryClient, timeout(1000).times(2)).add(any(),
                                                                          any(),
                                                                          anyLong(),
                                                                          eq(expectedGbids));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
    // None of the manually created callbacks may have been invoked.
    assertFalse(cbCalled.get());
}
/**
 * After a successful global add, only the first callback invocation may release
 * the sequencer (taskFinished exactly once); any later duplicate success or
 * failure invocations of the same callback must be ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterAddSuccess() throws InterruptedException, IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    // Replace the directory's sequencer with a spy to track its interactions.
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    // Stub the GCD add to do nothing but signal its invocation; the callback is
    // captured and driven manually below.
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS * 100, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(callbackCaptor.capture(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         anyLong(),
                         eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // First success releases the sequencer exactly once.
    callbackCaptor.getValue().onSuccess(null);
    verify(gcdTaskSequencerSpy).taskFinished();
    // Duplicate/late invocations must neither release again nor retry.
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * With awaitGlobalRegistration == true, retries are disabled: a timeout
 * failure finishes the task (taskFinished once), and all later invocations of
 * the stale callback are ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterAddTimeoutOnDisabledRetry() throws InterruptedException,
        IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    // Retries are disabled when awaitGlobalRegistration is true
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    // Stub the GCD add to only signal its invocation; the captured callback is
    // driven manually below.
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(callbackCaptor.capture(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         anyLong(),
                         eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // A timeout finishes the task immediately — no retry in this mode.
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verify(gcdTaskSequencerSpy).taskFinished();
    // Late/duplicate callback invocations are ignored.
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * With awaitGlobalRegistration == false, only a JoynrTimeoutException triggers
 * a retry of the add task; any other outcome (success, other runtime exception,
 * DiscoveryError) finishes the task, and stale callbacks are ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerRetriesAddOnJoynrTimeoutExceptionOnly() throws InterruptedException,
        IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = false;
    reset(globalCapabilitiesDirectoryClient);
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    Semaphore semaphore = new Semaphore(0);
    // Stub the GCD add to only signal each invocation (initial attempt + retry).
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            semaphore.release();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    assertTrue(semaphore.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).add(callbackCaptor.capture(),
                                                            any(),
                                                            anyLong(),
                                                            eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // A timeout triggers a retry of the same task.
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verify(gcdTaskSequencerSpy).retryTask();
    // All further invocations of the now-stale first callback are ignored.
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    // The retry performs a second GCD add with a fresh callback.
    assertTrue(semaphore.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(2)).add(callbackCaptor.capture(),
                                                            any(),
                                                            anyLong(),
                                                            eq(expectedGbids));
    verify(gcdTaskSequencerSpy, never()).taskFinished();
    // A non-timeout failure finishes the task — no further retries.
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    verify(gcdTaskSequencerSpy).taskFinished();
    // Late/duplicate invocations of the second callback are ignored as well.
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * A modeled DiscoveryError finishes the add task without a retry
 * (taskFinished exactly once); any later invocations of the stale callback
 * must be ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterAddDiscoveryError() throws InterruptedException, IllegalAccessException {
    String[] expectedGbids = new String[]{ knownGbids[0] };
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    // Stub the GCD add to only signal its invocation; the captured callback is
    // driven manually below.
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, expectedGbids);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).add(callbackCaptor.capture(),
                                                            any(),
                                                            anyLong(),
                                                            eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // The DiscoveryError finishes the task without retry.
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verify(gcdTaskSequencerSpy).taskFinished();
    // Late/duplicate callback invocations are ignored.
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
@Test(timeout = TEST_TIMEOUT)
public void addRemoveAddInSequence_awaitGlobalRegistration_true() throws InterruptedException {
    // A sequence of add-remove-add for the same provider could lead to a non registered provider in earlier versions
    final Boolean awaitGlobalRegistration = true;
    final String participantId = discoveryEntry.getParticipantId();
    discoveryEntry.getQos().setScope(ProviderScope.GLOBAL);
    expectedDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
    globalDiscoveryEntry = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry, globalAddress1);
    // checked in remove
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(eq(participantId), anyLong());
    // Each GCD stub uses a semaphore pair: the first one is released when the mocked call
    // is entered, the second one blocks the mocked call until the test releases it. This
    // lets the test inspect the state while the GCD call is still "in flight".
    Semaphore addSemaphore1 = new Semaphore(0);
    Semaphore addSemaphore2 = new Semaphore(0);
    doAnswer(createAnswerWithSuccess(addSemaphore1,
                                     addSemaphore2)).when(globalCapabilitiesDirectoryClient)
                                                    .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                         argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry)),
                                                         anyLong(),
                                                         any(String[].class));
    Semaphore removeSemaphore1 = new Semaphore(0);
    Semaphore removeSemaphore2 = new Semaphore(0);
    doAnswer(createAnswerWithSuccess(removeSemaphore1,
                                     removeSemaphore2)).when(globalCapabilitiesDirectoryClient)
                                                       .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                               eq(participantId),
                                                               any(String[].class));
    // 3 actions. 1 global add 1 lcd.remove and 1 global add
    Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    Promise<DeferredVoid> promiseRemove = localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    // add1: with awaitGlobalRegistration the local store must only be updated AFTER the
    // global add has returned, so while the GCD add is in flight nothing is stored yet.
    assertTrue(addSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  anyLong(),
                                                  any(String[].class));
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    addSemaphore2.release();
    checkPromiseSuccess(promiseAdd1, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    // remove: local entry is removed only after the global remove has returned.
    assertTrue(removeSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                     eq(participantId),
                                                     any(String[].class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(eq(participantId));
    removeSemaphore2.release();
    checkPromiseSuccess(promiseRemove, "remove failed");
    verify(localDiscoveryEntryStoreMock, timeout(1000).times(1)).remove(eq(participantId));
    // add2: the provider must be registered globally again after the remove.
    assertTrue(addSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(2)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any(String[].class));
    verify(localDiscoveryEntryStoreMock, times(1)).add(any(DiscoveryEntry.class));
    addSemaphore2.release();
    checkPromiseSuccess(promiseAdd2, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(2)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    // Check the global and local interaction ordering: add - remove - add.
    InOrder inOrderGlobal = inOrder(globalCapabilitiesDirectoryClient);
    inOrderGlobal.verify(globalCapabilitiesDirectoryClient)
                 .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                      argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                      anyLong(),
                      any(String[].class));
    inOrderGlobal.verify(globalCapabilitiesDirectoryClient)
                 .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         eq(participantId),
                         any(String[].class));
    inOrderGlobal.verify(globalCapabilitiesDirectoryClient)
                 .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                      argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                      anyLong(),
                      any(String[].class));
    InOrder inOrderLocal = inOrder(localDiscoveryEntryStoreMock);
    inOrderLocal.verify(localDiscoveryEntryStoreMock, times(1))
                .add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    inOrderLocal.verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(participantId));
    inOrderLocal.verify(localDiscoveryEntryStoreMock, times(1))
                .add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
}
@Test(timeout = TEST_TIMEOUT)
public void addAndRemoveAndAddCalledInRowSameDiscoveryEntry_awaitGlobalRegistration_false() throws InterruptedException {
    // A sequence of add-remove-add for the same provider could lead to a non registered provider in earlier versions
    // This is still the case for awaitGlobalRegistration = false if the second add is executed before the remove has
    // returned from GCD
    final Boolean awaitGlobalRegistration = false;
    final String participantId = discoveryEntry.getParticipantId();
    discoveryEntry.getQos().setScope(ProviderScope.GLOBAL);
    expectedDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
    globalDiscoveryEntry = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry, globalAddress1);
    // checked in remove
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(eq(participantId), anyLong());
    // Each GCD stub uses a semaphore pair: the first one is released when the mocked call
    // is entered, the second one blocks the mocked call until the test releases it.
    Semaphore addSemaphore1 = new Semaphore(0);
    Semaphore addSemaphore2 = new Semaphore(0);
    doAnswer(createAnswerWithSuccess(addSemaphore1,
                                     addSemaphore2)).when(globalCapabilitiesDirectoryClient)
                                                    .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                         argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry)),
                                                         anyLong(),
                                                         any(String[].class));
    Semaphore removeSemaphore1 = new Semaphore(0);
    Semaphore removeSemaphore2 = new Semaphore(0);
    doAnswer(createAnswerWithSuccess(removeSemaphore1,
                                     removeSemaphore2)).when(globalCapabilitiesDirectoryClient)
                                                       .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                               eq(participantId),
                                                               any(String[].class));
    // 3 actions. 1 global add 1 lcd.remove and 1 global add
    // With awaitGlobalRegistration == false the promises resolve immediately; the entry
    // is stored locally right away while the GCD calls are still pending.
    Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promiseAdd1, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    Promise<DeferredVoid> promiseRemove = localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    checkPromiseSuccess(promiseRemove, "remove failed");
    Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promiseAdd2, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(2)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    // add1: let the first (pending) global add complete.
    assertTrue(addSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  anyLong(),
                                                  any(String[].class));
    addSemaphore2.release();
    // remove: the local entry is removed only after the global remove has returned.
    assertTrue(removeSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                     eq(participantId),
                                                     any(String[].class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(eq(participantId));
    removeSemaphore2.release();
    verify(localDiscoveryEntryStoreMock, timeout(1000).times(1)).remove(eq(participantId));
    // add2: the provider must be registered globally a second time.
    assertTrue(addSemaphore1.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(2)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any(String[].class));
    addSemaphore2.release();
    // Check the global interaction ordering: add - remove - add.
    InOrder inOrderGlobal = inOrder(globalCapabilitiesDirectoryClient);
    inOrderGlobal.verify(globalCapabilitiesDirectoryClient)
                 .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                      argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                      anyLong(),
                      any(String[].class));
    inOrderGlobal.verify(globalCapabilitiesDirectoryClient)
                 .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         eq(participantId),
                         any(String[].class));
    inOrderGlobal.verify(globalCapabilitiesDirectoryClient)
                 .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                      argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                      anyLong(),
                      any(String[].class));
    // Locally both adds happen before the remove, because the remove only takes effect
    // after the GCD remove has returned.
    InOrder inOrderLocal = inOrder(localDiscoveryEntryStoreMock);
    inOrderLocal.verify(localDiscoveryEntryStoreMock, times(1))
                .add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    inOrderLocal.verify(localDiscoveryEntryStoreMock, times(1))
                .add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    inOrderLocal.verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(participantId));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
}
@Test(timeout = TEST_TIMEOUT)
public void localAddRemoveAddInSequence_doesNotInvokeGcdAndCache() throws InterruptedException {
    // A LOCAL-scoped provider must never touch the GCD client or the global cache; all
    // three actions (add, remove, add) operate on the local discovery entry store only.
    final String participantId = discoveryEntry.getParticipantId();
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    // checked in remove
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(eq(participantId), anyLong());
    // 3 actions: local add, local remove, local add
    Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry);
    Promise<DeferredVoid> promiseRemove = localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry);
    checkPromiseSuccess(promiseAdd1, "add failed");
    checkPromiseSuccess(promiseRemove, "remove failed");
    checkPromiseSuccess(promiseAdd2, "add failed");
    // Neither the global cache nor the GCD client may be invoked for local providers.
    verify(globalDiscoveryEntryCacheMock, never()).add(ArgumentMatchers.<GlobalDiscoveryEntry> any());
    verify(globalDiscoveryEntryCacheMock, never()).remove(anyString());
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
    // Local store sees the operations in call order: add - remove - add.
    InOrder inOrder = inOrder(localDiscoveryEntryStoreMock);
    inOrder.verify(localDiscoveryEntryStoreMock, times(1))
           .add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    inOrder.verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(participantId));
    inOrder.verify(localDiscoveryEntryStoreMock, times(1))
           .add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
}
@Test(timeout = TEST_TIMEOUT)
public void add_local_doesNotInvokeGcdAndCache() throws InterruptedException {
    // Adding a LOCAL-scoped entry must only update the local discovery entry store;
    // neither the GCD client nor the global cache may be touched.
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    // NOTE(review): purpose of this sleep is not evident from the test itself —
    // presumably it ensures the entry's lastSeenDateMs differs from setup time; confirm.
    Thread.sleep(100);
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry);
    checkPromiseSuccess(promise, "add failed");
    verify(localDiscoveryEntryStoreMock).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalCapabilitiesDirectoryClient,
           never()).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                        any(GlobalDiscoveryEntry.class),
                        anyLong(),
                        ArgumentMatchers.<String[]> any());
    verify(globalDiscoveryEntryCacheMock, never()).add(ArgumentMatchers.<GlobalDiscoveryEntry> any());
}
@Test(timeout = TEST_TIMEOUT)
public void addGlobalCapSucceeds_NextAddShallAddGlobalAgain() throws InterruptedException {
    // After a successful global registration, adding the same provider again must trigger
    // another GCD add (with an updated lastSeenDateMs) instead of being skipped.
    final boolean awaitGlobalRegistration = true;
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         remainingTtlCapture.capture(),
                         ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCapture);
    Thread.sleep(1); // make sure that the lastSeenDate of expected entry 2 is larger than the lastSeenDateMs of expected entry 1
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(expectedGlobalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    // Entry 2 (newer lastSeenDateMs) has not been stored yet at this point.
    verify(localDiscoveryEntryStoreMock,
           times(0)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    // Simulate that the entry already exists locally; the second add must still go global.
    doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(any(DiscoveryEntry.class));
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(anyString(), anyLong());
    Promise<DeferredVoid> promise2 = localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
    checkPromiseSuccess(promise2, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                         remainingTtlCapture.capture(),
                         ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCapture);
}
@Test(timeout = TEST_TIMEOUT)
public void addGlobalCapFails_NextAddShallAddGlobalAgain() throws InterruptedException {
    // If the global registration fails (ProviderRuntimeException from the GCD), nothing is
    // stored locally and a subsequent add of the same provider must retry the global add.
    ProviderQos providerQos = new ProviderQos();
    providerQos.setScope(ProviderScope.GLOBAL);
    String participantId = LocalCapabilitiesDirectoryTest.class.getName() + ".addLocalAndThanGlobalShallWork";
    String domain = "testDomain";
    final DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
                                                             domain,
                                                             INTERFACE_NAME,
                                                             participantId,
                                                             providerQos,
                                                             System.currentTimeMillis(),
                                                             expiryDateMs,
                                                             publicKeyId);
    final DiscoveryEntry expectedDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
    globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
                                                    domain,
                                                    INTERFACE_NAME,
                                                    participantId,
                                                    providerQos,
                                                    System.currentTimeMillis(),
                                                    expiryDateMs,
                                                    publicKeyId,
                                                    globalAddress1Serialized);
    // First GCD add answers with an exception.
    ProviderRuntimeException exception = new ProviderRuntimeException("add failed");
    doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
                                                      .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                           argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                           anyLong(),
                                                           ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseException(promise, new ProviderRuntimeException(exception.toString()));
    ArgumentCaptor<Long> remainingTtlCaptor = ArgumentCaptor.forClass(Long.class);
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  remainingTtlCaptor.capture(),
                                                  ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCaptor);
    // Failed global add: nothing stored or removed locally, nothing cached.
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
    // Second attempt: GCD add now succeeds.
    reset(globalCapabilitiesDirectoryClient, localDiscoveryEntryStoreMock);
    doAnswer(createAnswerWithSuccess()).when(globalCapabilitiesDirectoryClient)
                                       .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                            argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                            anyLong(),
                                            ArgumentMatchers.<String[]> any());
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                                                  remainingTtlCaptor.capture(),
                                                  ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCaptor);
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    // Successful global add: entry is now stored locally.
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
/**
 * Helper: stubs the GCD add to answer with {@code expectedError} and verifies how
 * {@code add(entry, awaitGlobalRegistration, gbids)} reacts.
 * <ul>
 * <li>{@code awaitGlobalRegistration == true}: the promise is rejected with the error and
 * the entry is NOT stored locally.</li>
 * <li>{@code awaitGlobalRegistration == false}: the promise resolves immediately and the
 * entry IS stored locally despite the (later) global failure.</li>
 * </ul>
 */
private void testAddWithGbidsIsProperlyRejected(DiscoveryError expectedError,
                                                boolean awaitGlobalRegistration) throws InterruptedException {
    // Reset so that repeated invocations of this helper start from clean mocks.
    reset(globalCapabilitiesDirectoryClient, localDiscoveryEntryStoreMock);
    doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    String[] gbids = new String[]{ knownGbids[0] };
    String[] expectedGbids = gbids.clone();
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    if (awaitGlobalRegistration) {
        checkPromiseError(promise, expectedError);
    } else {
        // Fixed typo in failure message: "withoud" -> "without".
        checkPromiseSuccess(promise, "add without awaitGlobalRegistration failed");
    }
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  anyLong(),
                                                  eq(expectedGbids));
    // Local store is only updated when the caller did not wait for the (failing) global add.
    verify(localDiscoveryEntryStoreMock, times(awaitGlobalRegistration ? 0 : 1)).add(any(DiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(any(String.class));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
}
/**
 * Helper: stubs the GCD add to answer with {@code expectedError} and verifies that
 * {@code add(entry, awaitGlobalRegistration = true)} rejects the promise with a
 * ProviderRuntimeException carrying the error, without storing or caching the entry.
 * The GCD add is expected to be called with ALL known GBIDs.
 */
private void testAddIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
    // Reset so that repeated invocations of this helper start from a clean mock.
    reset(globalCapabilitiesDirectoryClient);
    String[] expectedGbids = knownGbids.clone();
    doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseErrorInProviderRuntimeException(promise, expectedError);
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  anyLong(),
                                                  eq(expectedGbids));
    // Rejected add: nothing stored, removed or cached.
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(any(String.class));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
}
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbidsIsProperlyRejectedAndHandlesDiscoveryError() throws InterruptedException {
    // Exercise every relevant DiscoveryError, first awaiting global registration,
    // then without awaiting it (same order as the individual calls it replaces).
    final DiscoveryError[] errorsToCheck = { DiscoveryError.INVALID_GBID,
                                             DiscoveryError.UNKNOWN_GBID,
                                             DiscoveryError.INTERNAL_ERROR };
    for (boolean awaitGlobalRegistration : new boolean[]{ true, false }) {
        for (DiscoveryError error : errorsToCheck) {
            testAddWithGbidsIsProperlyRejected(error, awaitGlobalRegistration);
        }
    }
}
@Test(timeout = TEST_TIMEOUT)
public void testAddIsProperlyRejectedAndHandlesDiscoveryError() throws InterruptedException {
    // Each error type must lead to a properly rejected add (same order as before).
    for (DiscoveryError error : new DiscoveryError[]{ DiscoveryError.INVALID_GBID,
                                                      DiscoveryError.UNKNOWN_GBID,
                                                      DiscoveryError.INTERNAL_ERROR }) {
        testAddIsProperlyRejected(error);
    }
}
/**
 * Helper: adds the default discovery entry with the given GBID array (awaiting global
 * registration) and asserts that the returned promise is rejected with
 * {@code expectedError}.
 */
private void testAddReturnsDiscoveryError(String[] gbids,
                                          DiscoveryError expectedError) throws InterruptedException {
    final boolean awaitGlobalRegistration = true;
    final Promise<Add1Deferred> addPromise = localCapabilitiesDirectory.add(discoveryEntry,
                                                                            awaitGlobalRegistration,
                                                                            gbids);
    checkPromiseError(addPromise, expectedError);
}
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_unknownGbid() throws InterruptedException {
    // One configured GBID plus one that is not configured -> UNKNOWN_GBID.
    testAddReturnsDiscoveryError(new String[]{ knownGbids[1], "unknown" }, DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_emptyGbid() throws InterruptedException {
    // An empty string is not a valid GBID -> INVALID_GBID.
    testAddReturnsDiscoveryError(new String[]{ knownGbids[1], "" }, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_duplicateGbid() throws InterruptedException {
    // The same GBID twice in one array is rejected -> INVALID_GBID.
    testAddReturnsDiscoveryError(new String[]{ knownGbids[1], knownGbids[1] }, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_nullGbid() throws InterruptedException {
    // A null element inside the GBID array is rejected -> INVALID_GBID.
    testAddReturnsDiscoveryError(new String[]{ knownGbids[1], null }, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_nullGbidArray() throws InterruptedException {
    // A null GBID array as a whole is rejected -> INVALID_GBID.
    testAddReturnsDiscoveryError((String[]) null, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_retryAfterTimeout() throws InterruptedException {
    // Without awaiting global registration, a JoynrTimeoutException from the GCD add must
    // trigger a retry (hence at least 2 GCD add calls); the promise resolves successfully
    // for the caller regardless.
    CountDownLatch cdl = new CountDownLatch(2); // counted down once per GCD add attempt
    doAnswer(createVoidAnswerWithException(cdl,
                                           new JoynrTimeoutException(0))).when(globalCapabilitiesDirectoryClient)
                                                                         .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                              argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                              anyLong(),
                                                                              ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = false;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           atLeast(2)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                           argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                           anyLong(),
                           any());
    checkPromiseSuccess(promise, "add failed");
}
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withAwaitGlobalRegistration_noRetryAfterTimeout() throws InterruptedException {
    // When awaiting global registration, a timeout from the GCD add must NOT be retried:
    // the promise is rejected and the entry is neither stored nor removed locally.
    JoynrTimeoutException timeoutException = new JoynrTimeoutException(0);
    ProviderRuntimeException expectedException = new ProviderRuntimeException(timeoutException.toString());
    doAnswer(createVoidAnswerWithException(timeoutException)).when(globalCapabilitiesDirectoryClient)
                                                             .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                  anyLong(),
                                                                  ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseException(promise, expectedException);
    // Exactly one GCD add attempt — no retry.
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
}
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_noRetryAfterRuntimeException() throws InterruptedException {
    // Without awaiting global registration, a generic JoynrRuntimeException from the GCD
    // add must NOT be retried (in contrast to a timeout); the promise still resolves
    // successfully and the local entry is not removed.
    JoynrRuntimeException runtimeException = new JoynrRuntimeException("custom runtime exception");
    CountDownLatch cdl = new CountDownLatch(1); // counted down when the GCD add is invoked
    doAnswer(createVoidAnswerWithException(cdl,
                                           runtimeException)).when(globalCapabilitiesDirectoryClient)
                                                             .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                  anyLong(),
                                                                  ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = false;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // Exactly one GCD add attempt — no retry.
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(eq(globalDiscoveryEntry.getParticipantId()));
    checkPromiseSuccess(promise, "add failed");
}
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_noRetryAfterDiscoveryError() throws InterruptedException {
    // Without awaiting global registration, a modeled DiscoveryError from the GCD add must
    // NOT be retried; the promise still resolves successfully and the local entry remains.
    DiscoveryError expectedError = DiscoveryError.UNKNOWN_GBID;
    CountDownLatch cdl = new CountDownLatch(1); // counted down when the GCD add is invoked
    doAnswer(createVoidAnswerWithDiscoveryError(cdl,
                                                expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = false;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // Exactly one GCD add attempt — no retry.
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(eq(globalDiscoveryEntry.getParticipantId()));
    checkPromiseSuccess(promise, "add failed");
}
/**
 * Helper: registers two providers in a row while the first GCD add is delayed by
 * {@code sleepTime}. With {@code awaitGlobalRegistration == true} the adds are processed
 * sequentially, so the remaining TTL passed to the GCD for the second add must be reduced
 * by roughly the time the first add spent in flight. Without awaiting, both adds must use
 * the full default TTL.
 */
private void globalAddUsesCorrectRemainingTtl(boolean awaitGlobalRegistration) throws InterruptedException {
    int defaultTtl = MessagingQos.DEFAULT_TTL;
    DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry1.setParticipantId("participantId1");
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry2.setParticipantId("participantId2");
    GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
                                                                                                     globalAddress1);
    GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
                                                                                                     globalAddress1);
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
    // First GCD add signals its start, then sleeps before answering, so that the second
    // add (if sequential) is measurably delayed.
    CountDownLatch startOfFirstAddCdl = new CountDownLatch(1);
    CountDownLatch endOfFirstAddCdl = new CountDownLatch(1);
    long sleepTime = 1000l;
    doAnswer(createAnswerWithDelayedSuccess(startOfFirstAddCdl,
                                            endOfFirstAddCdl,
                                            sleepTime)).when(globalCapabilitiesDirectoryClient)
                                                       .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                            argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry1)),
                                                            anyLong(),
                                                            ArgumentMatchers.<String[]> any());
    CountDownLatch secondAddCdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(secondAddCdl)).when(globalCapabilitiesDirectoryClient)
                                                   .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                        argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry2)),
                                                        anyLong(),
                                                        ArgumentMatchers.<String[]> any());
    localCapabilitiesDirectory.add(discoveryEntry1, awaitGlobalRegistration);
    localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
    assertTrue(startOfFirstAddCdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry1)),
                         remainingTtlCapture.capture(),
                         any());
    long firstNow = System.currentTimeMillis();
    long capturedFirstAddRemainingTtl = remainingTtlCapture.getValue();
    assertTrue(endOfFirstAddCdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    assertTrue(secondAddCdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry2)),
                         remainingTtlCapture.capture(),
                         any());
    long secondNow = System.currentTimeMillis();
    long delta = secondNow - firstNow;
    long capturedSecondAddRemainingTtl = remainingTtlCapture.getValue();
    long epsilon = 300; // timing tolerance in ms
    if (awaitGlobalRegistration) {
        // Second add was delayed behind the first one, so its remaining TTL must be
        // reduced by (roughly) the measured delta.
        assertTrue(capturedFirstAddRemainingTtl <= defaultTtl);
        assertTrue(capturedFirstAddRemainingTtl > defaultTtl - epsilon);
        assertTrue(capturedSecondAddRemainingTtl <= defaultTtl - delta + epsilon);
        assertTrue(capturedSecondAddRemainingTtl > defaultTtl - delta - epsilon);
    } else {
        // Fix: JUnit's assertEquals takes (expected, actual); the original had the
        // arguments swapped, which produces misleading failure messages.
        assertEquals(defaultTtl, capturedFirstAddRemainingTtl);
        assertEquals(defaultTtl, capturedSecondAddRemainingTtl);
    }
}
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withAwaitGlobalRegistration_usesCorrectRemainingTtl() throws InterruptedException {
    // Awaiting global registration: the second add's TTL must be reduced by the delay.
    final boolean awaitGlobalRegistration = true;
    globalAddUsesCorrectRemainingTtl(awaitGlobalRegistration);
}
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_usesCorrectRemainingTtl() throws InterruptedException {
    // Not awaiting global registration: both adds must use the full default TTL.
    final boolean awaitGlobalRegistration = false;
    globalAddUsesCorrectRemainingTtl(awaitGlobalRegistration);
}
@Test(timeout = TEST_TIMEOUT)
public void addSameGbidTwiceInARow() throws InterruptedException {
    // Adding the same provider for the same GBID twice must perform a full global add
    // both times (each with an updated lastSeenDateMs), even if the entry already exists
    // in the local store and the global cache.
    final boolean awaitGlobalRegistration = true;
    String[] gbids = new String[]{ knownGbids[0] };
    String[] expectedGbids = gbids.clone();
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    CountDownLatch cdl = new CountDownLatch(1); // counted down when the GCD add is invoked
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                               argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                                               anyLong(),
                                               eq(expectedGbids));
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    checkPromiseSuccess(promise, "add failed");
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    ArgumentCaptor<Long> remainingTtlCaptor = ArgumentCaptor.forClass(Long.class);
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         remainingTtlCaptor.capture(),
                         eq(expectedGbids));
    checkRemainingTtl(remainingTtlCaptor);
    Thread.sleep(1); // make sure that the lastSeenDate of expected entry 2 is larger than the lastSeenDateMs of expected entry 1
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(expectedGlobalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    // Simulate that the entry is already known locally and cached globally.
    doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(any(DiscoveryEntry.class));
    doReturn(Optional.of(globalDiscoveryEntry)).when(globalDiscoveryEntryCacheMock)
                                               .lookup(eq(expectedDiscoveryEntry.getParticipantId()), anyLong());
    Promise<Add1Deferred> promise2 = localCapabilitiesDirectory.add(discoveryEntry2,
                                                                    awaitGlobalRegistration,
                                                                    gbids);
    checkPromiseSuccess(promise2, "add failed");
    // entry is added again (with newer lastSeenDateMs)
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                         remainingTtlCaptor.capture(),
                         eq(expectedGbids));
    checkRemainingTtl(remainingTtlCaptor);
}
/**
 * Verifies that the same provider can be registered for two different GBIDs one after the
 * other: each add() triggers its own GCD add with only the requested GBID set, and a
 * subsequent GLOBAL_ONLY lookup is answered from the local store for either GBID without
 * contacting the GCD client again.
 */
@Test(timeout = TEST_TIMEOUT)
public void addDifferentGbidsAfterEachOther() throws InterruptedException {
    final boolean awaitGlobalRegistration = true;
    String[] gbids1 = new String[]{ knownGbids[0] };
    String[] expectedGbids1 = gbids1.clone();
    String[] gbids2 = new String[]{ knownGbids[1] };
    String[] expectedGbids2 = gbids2.clone();
    DiscoveryEntryWithMetaInfo expectedEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                              discoveryEntry);
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    doAnswer(createAnswerWithSuccess()).when(globalCapabilitiesDirectoryClient)
                                       .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                            any(GlobalDiscoveryEntry.class),
                                            anyLong(),
                                            ArgumentMatchers.<String[]> any());
    // first registration: only gbids1 is passed to the GCD client
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids1);
    checkPromiseSuccess(promise, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    ArgumentCaptor<Long> remainingTtlCaptor = ArgumentCaptor.forClass(Long.class);
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         remainingTtlCaptor.capture(),
                         eq(expectedGbids1));
    checkRemainingTtl(remainingTtlCaptor);
    Thread.sleep(1); // make sure that the lastSeenDate of expected entry 2 is larger than the lastSeenDateMs of expected entry 1
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(expectedGlobalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    // entry 2 has not been added yet
    verify(localDiscoveryEntryStoreMock,
           times(0)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(any(DiscoveryEntry.class));
    // second registration of the same provider, this time for gbids2
    Promise<Add1Deferred> promise2 = localCapabilitiesDirectory.add(discoveryEntry2,
                                                                    awaitGlobalRegistration,
                                                                    gbids2);
    checkPromiseSuccess(promise2, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                         remainingTtlCaptor.capture(),
                         eq(expectedGbids2));
    checkRemainingTtl(remainingTtlCaptor);
    // provider is now registered for both GBIDs
    doReturn(Arrays.asList(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookupGlobalEntries(eq(new String[]{
            expectedDiscoveryEntry.getDomain() }), eq(INTERFACE_NAME));
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
    // GLOBAL_ONLY lookups for either GBID are both answered with the single entry
    Promise<Lookup2Deferred> promiseLookup1 = localCapabilitiesDirectory.lookup(new String[]{
            expectedDiscoveryEntry.getDomain() }, expectedDiscoveryEntry.getInterfaceName(), discoveryQos, gbids1);
    Promise<Lookup2Deferred> promiseLookup2 = localCapabilitiesDirectory.lookup(new String[]{
            expectedDiscoveryEntry.getDomain() }, expectedDiscoveryEntry.getInterfaceName(), discoveryQos, gbids2);
    DiscoveryEntryWithMetaInfo[] result1 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup1,
                                                                                              "lookup failed")[0];
    assertEquals(1, result1.length);
    assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntryWithMetaInfo, result1[0]));
    DiscoveryEntryWithMetaInfo[] result2 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup2,
                                                                                              "lookup failed")[0];
    assertEquals(1, result2.length);
    assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntryWithMetaInfo, result2[0]));
    // no GCD lookup required: the entries are served from the local store
    verify(globalCapabilitiesDirectoryClient,
           times(0)).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                            ArgumentMatchers.<String[]> any(),
                            anyString(),
                            anyLong(),
                            ArgumentMatchers.<String[]> any());
}
/**
 * Shared check for both provider scopes: when add() is called for a participantId that is
 * not in the local store but has a stale entry in the global cache, the cached entry is
 * removed. The GCD client add is expected only for GLOBAL scope.
 *
 * @param scope the provider scope (LOCAL or GLOBAL) applied to the test entry
 */
void checkAddRemovesCachedEntryWithSameParticipantId(ProviderScope scope) throws InterruptedException {
    discoveryEntry.getQos().setScope(scope);
    expectedDiscoveryEntry.getQos().setScope(scope);
    // entry is unknown locally but present in the global cache
    doReturn(false).when(localDiscoveryEntryStoreMock)
                   .hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    doReturn(Optional.of(globalDiscoveryEntry)).when(globalDiscoveryEntryCacheMock)
                                               .lookup(eq(expectedDiscoveryEntry.getParticipantId()),
                                                       eq(Long.MAX_VALUE));
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
    checkPromiseSuccess(promise, "add failed");
    verify(localDiscoveryEntryStoreMock).hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(localDiscoveryEntryStoreMock, never()).lookup(any(), any());
    verify(globalDiscoveryEntryCacheMock, times(1)).lookup(eq(expectedGlobalDiscoveryEntry.getParticipantId()),
                                                           eq(Long.MAX_VALUE));
    // the stale cached entry must be removed
    verify(globalDiscoveryEntryCacheMock, times(1)).remove(eq(expectedGlobalDiscoveryEntry.getParticipantId()));
    // GCD registration happens only for globally scoped providers
    int calls = (scope == ProviderScope.GLOBAL ? 1 : 0);
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient, times(calls)).add(any(), any(), anyLong(), any());
}
/** LOCAL-scope variant: cached entry is removed, no GCD add expected. */
@Test(timeout = TEST_TIMEOUT)
public void add_removesCachedEntryWithSameParticipantId_ProviderScope_LOCAL() throws InterruptedException {
    checkAddRemovesCachedEntryWithSameParticipantId(ProviderScope.LOCAL);
}
/** GLOBAL-scope variant: cached entry is removed and one GCD add is expected. */
@Test(timeout = TEST_TIMEOUT)
public void add_removesCachedEntryWithSameParticipantId_ProviderScope_GLOBAL() throws InterruptedException {
    checkAddRemovesCachedEntryWithSameParticipantId(ProviderScope.GLOBAL);
}
/**
 * Verifies that re-adding an already-known, unchanged LOCAL entry is a no-op:
 * neither the local store nor the global cache nor the GCD client are modified.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddKnownLocalEntryDoesNothing() throws InterruptedException {
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    // entry already exists in the local store with identical content
    doReturn(true).when(localDiscoveryEntryStoreMock)
                  .hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
                                         .lookup(eq(expectedDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, false, knownGbids);
    checkPromiseSuccess(promise, "add failed");
    verify(localDiscoveryEntryStoreMock).hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(localDiscoveryEntryStoreMock).lookup(eq(expectedDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
    // no cache or GCD interactions for an unchanged local entry
    verify(globalDiscoveryEntryCacheMock, never()).lookup(anyString(), anyLong());
    verify(globalDiscoveryEntryCacheMock, never()).remove(anyString());
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient, never()).add(any(), any(), anyLong(), any());
}
/**
 * Verifies that a known LOCAL entry whose expiryDateMs differs from the stored one is
 * added to the local store again, while the global cache and GCD client stay untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddKnownLocalEntryWithDifferentExpiryDateAddsAgain() throws InterruptedException {
    // same entry but with a later expiry date
    DiscoveryEntry newDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
    newDiscoveryEntry.setExpiryDateMs(discoveryEntry.getExpiryDateMs() + 1);
    newDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(newDiscoveryEntry);
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
                                         .lookup(eq(newDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(newDiscoveryEntry, false, knownGbids);
    checkPromiseSuccess(promise, "add failed");
    verify(localDiscoveryEntryStoreMock).hasDiscoveryEntry(newDiscoveryEntry);
    verify(localDiscoveryEntryStoreMock).lookup(eq(newDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
    // updated entry is stored again
    verify(localDiscoveryEntryStoreMock).add(eq(newDiscoveryEntry));
    // check whether the local entry is in the global cache (unlikely). If so, then remove it
    verify(globalDiscoveryEntryCacheMock, times(1)).lookup(anyString(), anyLong());
    verify(globalDiscoveryEntryCacheMock, never()).remove(anyString());
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient, never()).add(any(), any(), anyLong(), any());
}
/**
 * Verifies that when the global address provider throws, add() defers the global
 * registration: the directory registers itself as transport-ready listener instead of
 * adding the entry to the cache or calling the GCD client.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGlobalAddressProviderThrowingException() throws InterruptedException {
    when(globalAddressProvider.get()).thenThrow(new JoynrRuntimeException());
    final boolean awaitGlobalRegistration = true;
    localCapabilitiesDirectory.add(globalDiscoveryEntry, awaitGlobalRegistration, knownGbids);
    // registration is postponed until the global address becomes available
    verify(globalAddressProvider).registerGlobalAddressesReadyListener((TransportReadyListener) localCapabilitiesDirectory);
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient, times(0)).add(any(), any(), anyLong(), any());
}
/**
 * Verifies that addToAll() registers the entry locally and performs one GCD add for
 * all known GBIDs, with a remaining TTL captured and validated.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAll() throws InterruptedException {
    boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
    checkPromiseSuccess(promise, "addToAll failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    // GCD add must target all known GBIDs
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                                                  remainingTtlCapture.capture(),
                                                  eq(knownGbids));
    checkRemainingTtl(remainingTtlCapture);
}
/**
 * Verifies that addToAll() for a LOCAL-scoped entry only touches the local store and
 * never the global cache or the GCD client.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAll_local() throws InterruptedException {
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    checkPromiseSuccess(promise, "addToAll failed");
    // no global interaction for local scope
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           never()).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                        any(GlobalDiscoveryEntry.class),
                        anyLong(),
                        ArgumentMatchers.<String[]> any());
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
}
/**
 * Verifies that a JoynrRuntimeException from the GCD add rejects the addToAll() promise
 * with a ProviderRuntimeException and leaves local store and cache unmodified.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_exception() throws InterruptedException {
    String[] expectedGbids = knownGbids.clone();
    JoynrRuntimeException exception = new JoynrRuntimeException("add failed");
    // the directory wraps the client exception into a ProviderRuntimeException
    ProviderRuntimeException expectedException = new ProviderRuntimeException(exception.toString());
    doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
                                                      .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                           argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                           anyLong(),
                                                           ArgumentMatchers.<String[]> any());
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry, true);
    checkPromiseException(promise, expectedException);
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  anyLong(),
                                                  eq(expectedGbids));
    // nothing is stored or removed on failure
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(any(String.class));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
}
/**
 * Shared check: a modeled DiscoveryError from the GCD add rejects the addToAll() promise
 * with the same error and leaves local store and cache unmodified.
 *
 * @param expectedError the DiscoveryError the mocked GCD client reports and the promise
 *                      is expected to carry
 */
private void testAddToAllIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
    String[] expectedGbids = knownGbids.clone();
    doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry, true);
    checkPromiseError(promise, expectedError);
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  anyLong(),
                                                  eq(expectedGbids));
    // nothing is stored or removed on failure
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(any(String.class));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
}
/** INTERNAL_ERROR variant of the shared rejection check. */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_internalError() throws InterruptedException {
    DiscoveryError expectedError = DiscoveryError.INTERNAL_ERROR;
    testAddToAllIsProperlyRejected(expectedError);
}
/** INVALID_GBID variant of the shared rejection check. */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_invalidGbid() throws InterruptedException {
    DiscoveryError expectedError = DiscoveryError.INVALID_GBID;
    testAddToAllIsProperlyRejected(expectedError);
}
/** UNKNOWN_GBID variant of the shared rejection check. */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_unknownGbid() throws InterruptedException {
    DiscoveryError expectedError = DiscoveryError.UNKNOWN_GBID;
    testAddToAllIsProperlyRejected(expectedError);
}
/**
 * Builds a Mockito Answer for GCD lookup stubs: reports the given entries to the
 * lookup callback (first invocation argument) and returns a future already completed
 * with the same list.
 */
private static Answer<Future<List<GlobalDiscoveryEntry>>> createLookupAnswer(final List<GlobalDiscoveryEntry> caps) {
    return invocation -> {
        Callback<List<GlobalDiscoveryEntry>> lookupCallback = invocation.getArgument(0);
        Future<List<GlobalDiscoveryEntry>> lookupFuture = new Future<List<GlobalDiscoveryEntry>>();
        lookupCallback.onSuccess(caps);
        lookupFuture.onSuccess(caps);
        return lookupFuture;
    };
}
/**
 * Builds a Mockito Answer for single-entry GCD lookup stubs: reports the given entry
 * to the lookup callback (first invocation argument) and returns a future already
 * completed with the same entry.
 */
private static Answer<Future<GlobalDiscoveryEntry>> createLookupAnswer(final GlobalDiscoveryEntry caps) {
    return invocation -> {
        Callback<GlobalDiscoveryEntry> lookupCallback = invocation.getArgument(0);
        Future<GlobalDiscoveryEntry> lookupFuture = new Future<GlobalDiscoveryEntry>();
        lookupCallback.onSuccess(caps);
        lookupFuture.onSuccess(caps);
        return lookupFuture;
    };
}
/** Answer that immediately invokes the success callback, without any semaphore handshake. */
private static Answer<Void> createAnswerWithSuccess() {
    final Semaphore noSemaphore = null;
    return createAnswerWithSuccess(noSemaphore, noSemaphore);
}
/**
 * Answer that invokes the success callback, optionally signalling/awaiting semaphores:
 * releases the first semaphore (if non-null) before the callback, and tries to acquire
 * the second one (if non-null), bounded by TEST_TIMEOUT. The tryAcquire result is
 * deliberately ignored — on timeout the callback still fires, matching the original
 * best-effort behavior.
 */
private static Answer<Void> createAnswerWithSuccess(Semaphore successCallbackSemaphore1,
                                                    Semaphore successCallbackSemaphore2) {
    return invocation -> {
        if (successCallbackSemaphore1 != null) {
            logger.debug("success callback called");
            successCallbackSemaphore1.release();
        }
        if (successCallbackSemaphore2 != null) {
            logger.debug("waiting for Semaphore in success callback");
            successCallbackSemaphore2.tryAcquire(TEST_TIMEOUT, TimeUnit.MILLISECONDS);
        }
        Callback<Void> successCallback = invocation.getArgument(0);
        successCallback.onSuccess(null);
        return null;
    };
}
/**
 * Answer that invokes the success callback and then counts down the given latch so the
 * test can wait for the stubbed call to have happened.
 */
private static Answer<Void> createAnswerWithSuccess(CountDownLatch cdl) {
    return invocation -> {
        Callback<Void> successCallback = invocation.getArgument(0);
        successCallback.onSuccess(null);
        cdl.countDown();
        return null;
    };
}
/**
 * Answer that completes the success callback asynchronously after the given delay:
 * a worker thread counts down cdlStart immediately, sleeps for {@code delay} ms,
 * invokes the callback and finally counts down cdlDone.
 */
private static Answer<Void> createAnswerWithDelayedSuccess(CountDownLatch cdlStart,
                                                           CountDownLatch cdlDone,
                                                           long delay) {
    return invocation -> {
        final Callback<Void> successCallback = invocation.getArgument(0);
        new Thread(() -> {
            cdlStart.countDown();
            try {
                Thread.sleep(delay);
            } catch (Exception e) {
                fail("SLEEP INTERRUPTED");
            }
            successCallback.onSuccess(null);
            cdlDone.countDown();
        }).start();
        return null;
    };
}
/**
 * Answer that fails the callback with the given exception; uses a zero-count latch
 * so that countDown() in the shared implementation is a no-op.
 */
private static Answer<Void> createVoidAnswerWithException(JoynrRuntimeException exception) {
    return createVoidAnswerWithException(new CountDownLatch(0), exception);
}
/**
 * Answer that fails the callback with the given DiscoveryError; uses a zero-count latch
 * so that countDown() in the shared implementation is a no-op.
 */
private static Answer<Void> createVoidAnswerWithDiscoveryError(DiscoveryError error) {
    return createVoidAnswerWithDiscoveryError(new CountDownLatch(0), error);
}
/**
 * Answer that reports the given exception via the callback's onFailure and then counts
 * down the latch so the test can wait for the failure to have been delivered.
 */
private static Answer<Void> createVoidAnswerWithException(CountDownLatch cdl, JoynrRuntimeException exception) {
    return invocation -> {
        Callback<Void> failingCallback = invocation.getArgument(0);
        failingCallback.onFailure(exception);
        cdl.countDown();
        return null;
    };
}
/**
 * Answer that reports the given modeled DiscoveryError via the callback's onFailure and
 * then counts down the latch so the test can wait for the failure to have been delivered.
 */
private static Answer<Void> createVoidAnswerWithDiscoveryError(CountDownLatch cdl, DiscoveryError error) {
    return invocation -> {
        CallbackWithModeledError<Void, DiscoveryError> failingCallback = invocation.getArgument(0);
        failingCallback.onFailure(error);
        cdl.countDown();
        return null;
    };
}
/**
 * Verifies GLOBAL_ONLY lookup by domain/interface: local entries are never returned
 * (even after being added or removed), every lookup goes to the GCD client until a
 * global entry lands in the cache, and cache expiry forces another GCD call.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_globalOnly() throws InterruptedException {
    List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
    String domain1 = "domain1";
    String[] domains = new String[]{ domain1 };
    String interfaceName1 = "interfaceName1";
    DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 1000L, DiscoveryScope.GLOBAL_ONLY, false);
    // cache is empty, GCD returns an empty result list
    when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
                                              eq(interfaceName1),
                                              eq(discoveryQos.getCacheMaxAge()))).thenReturn(new ArrayList<GlobalDiscoveryEntry>());
    doAnswer(createLookupAnswer(caps)).when(globalCapabilitiesDirectoryClient)
                                      .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                              eq(domains),
                                              eq(interfaceName1),
                                              eq(discoveryQos.getDiscoveryTimeout()),
                                              eq(knownGbids));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(1,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         0);
    verify(routingTable, never()).incrementReferenceCount(any());
    // add local entry
    ProviderQos providerQos = new ProviderQos();
    providerQos.setScope(ProviderScope.LOCAL);
    DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
                                                       domain1,
                                                       interfaceName1,
                                                       "localParticipant",
                                                       providerQos,
                                                       System.currentTimeMillis(),
                                                       expiryDateMs,
                                                       publicKeyId);
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promiseAdd, "add failed");
    // local entry must not satisfy a GLOBAL_ONLY lookup: the GCD is queried again
    Promise<Lookup1Deferred> promise2 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(2,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise2,
                                         0);
    verify(routingTable, never()).incrementReferenceCount(any());
    // even deleting local cap entries shall have no effect, the global cap dir shall be invoked
    when(localDiscoveryEntryStoreMock.lookup(discoveryEntry.getParticipantId(),
                                             Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry));
    localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    verify(localDiscoveryEntryStoreMock).remove(discoveryEntry.getParticipantId());
    Promise<Lookup1Deferred> promise3 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(3,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise3,
                                         0);
    verify(routingTable, never()).incrementReferenceCount(any());
    // add global entry
    String globalParticipantId = "globalParticipant";
    GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
                                                            domain1,
                                                            interfaceName1,
                                                            globalParticipantId,
                                                            new ProviderQos(),
                                                            System.currentTimeMillis(),
                                                            expiryDateMs,
                                                            publicKeyId,
                                                            globalAddress1Serialized);
    caps.add(capInfo);
    doAnswer(createLookupAnswer(caps)).when(globalCapabilitiesDirectoryClient)
                                      .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                              eq(domains),
                                              eq(interfaceName1),
                                              eq(discoveryQos.getDiscoveryTimeout()),
                                              eq(knownGbids));
    Promise<Lookup1Deferred> promise4 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(4,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise4,
                                         1); // 1 global entry
    verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
    verify(routingTable, never()).incrementReferenceCount(any());
    // now, another lookup call shall take the cached for the global cap call, and no longer call the global cap dir
    // (as long as the cache is not expired)
    reset((Object) globalDiscoveryEntryCacheMock);
    reset((Object) routingTable);
    when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
                                              eq(interfaceName1),
                                              eq(discoveryQos.getCacheMaxAge()))).thenReturn(Arrays.asList(capInfo));
    Promise<Lookup1Deferred> promise5 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(4,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise5,
                                         1); // 1 cached entry
    verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
    verify(routingTable, never()).incrementReferenceCount(any());
    reset((Object) routingTable);
    // and now, invalidate the existing cached global values, resulting in another call to globalcapclient
    discoveryQos.setCacheMaxAge(0L);
    Thread.sleep(1);
    // now, another lookup call shall call the globalCapabilitiesDirectoryClient, as the global cap dir is expired
    Promise<Lookup1Deferred> promise6 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(5,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise6,
                                         1); // 1 global entry
    verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
    verify(routingTable, never()).incrementReferenceCount(any());
    reset(globalCapabilitiesDirectoryClient);
}
/**
 * Verifies that the GCD client lookup has been invoked the expected number of times with
 * the given parameters (domains matched in any order) and that the promise resolved
 * successfully with the expected number of discovery entries.
 *
 * @param gcdTimesCalled        expected cumulative GCD lookup invocation count
 * @param domains               expected lookup domains (order-insensitive)
 * @param interfaceName         expected interface name
 * @param discoveryTimeout      expected discovery timeout passed to the GCD client
 * @param gbids                 expected GBIDs passed to the GCD client
 * @param promise               the lookup promise to check for success
 * @param numberOfReturnedValues expected length of the resolved DiscoveryEntryWithMetaInfo array
 * @return the resolved promise values for further inspection by the caller
 */
private Object[] verifyGcdLookupAndPromiseFulfillment(int gcdTimesCalled,
                                                      String[] domains,
                                                      String interfaceName,
                                                      long discoveryTimeout,
                                                      String[] gbids,
                                                      Promise<?> promise,
                                                      int numberOfReturnedValues) throws InterruptedException {
    verify(globalCapabilitiesDirectoryClient,
           times(gcdTimesCalled)).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                         org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                         eq(interfaceName),
                                         eq(discoveryTimeout),
                                         eq(gbids));
    Object[] values = checkPromiseSuccess(promise, "Unexpected rejection in global lookup");
    assertEquals(numberOfReturnedValues, ((DiscoveryEntryWithMetaInfo[]) values[0]).length);
    return values;
}
/**
 * Verifies LOCAL_THEN_GLOBAL lookup by domain/interface: while a local entry exists it is
 * returned without querying the GCD; once the local entry is gone the GCD is queried,
 * its result is cached, and cache expiry forces another GCD call.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_localThenGlobal() throws InterruptedException {
    List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
    String domain1 = "domain1";
    String[] domains = new String[]{ domain1 };
    String interfaceName1 = "interfaceName1";
    DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 1000L, DiscoveryScope.LOCAL_THEN_GLOBAL, false);
    doAnswer(createLookupAnswer(caps)).when(globalCapabilitiesDirectoryClient)
                                      .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                              eq(domains),
                                              eq(interfaceName1),
                                              eq(discoveryQos.getDiscoveryTimeout()),
                                              eq(knownGbids));
    // no local entry yet: lookup falls through to the GCD client
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(1,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         0);
    verify(routingTable, never()).incrementReferenceCount(any());
    // add local entry
    ProviderQos providerQos = new ProviderQos();
    providerQos.setScope(ProviderScope.LOCAL);
    String localParticipantId = "localParticipant";
    DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
                                                       domain1,
                                                       interfaceName1,
                                                       localParticipantId,
                                                       providerQos,
                                                       System.currentTimeMillis(),
                                                       expiryDateMs,
                                                       publicKeyId);
    reset((Object) localDiscoveryEntryStoreMock);
    when(localDiscoveryEntryStoreMock.lookup(eq(domains),
                                             eq(interfaceName1))).thenReturn(Arrays.asList(discoveryEntry));
    // local entry present: served locally, GCD count stays at 1
    promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(1,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1); // 1 local entry
    verify(routingTable, times(1)).incrementReferenceCount(eq(localParticipantId));
    // add global entry
    String globalParticipantId = "globalParticipant";
    GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
                                                            domain1,
                                                            interfaceName1,
                                                            globalParticipantId,
                                                            new ProviderQos(),
                                                            System.currentTimeMillis(),
                                                            expiryDateMs,
                                                            publicKeyId,
                                                            globalAddress1Serialized);
    caps.add(capInfo);
    // local entry still wins over the now-available global entry
    promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(1,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1); // 1 local entry
    verify(routingTable, times(2)).incrementReferenceCount(eq(localParticipantId));
    verify(routingTable, never()).put(anyString(), any(Address.class), eq(true), anyLong());
    // without local entry, the global cap dir is called
    reset((Object) localDiscoveryEntryStoreMock);
    when(localDiscoveryEntryStoreMock.lookup(anyString(), anyLong())).thenReturn(Optional.empty());
    promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(2,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1); // 1 global entry
    verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
    // now, another lookup call shall take the cached for the global cap call, and no longer call the global cap dir
    // (as long as the cache is not expired)
    when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
                                              eq(interfaceName1),
                                              eq(discoveryQos.getCacheMaxAge()))).thenReturn(Arrays.asList(capInfo));
    promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(2,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1); // 1 cached entry
    verify(routingTable, times(2)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
    // and now, invalidate the existing cached global values, resulting in another call to globalcapclient
    discoveryQos.setCacheMaxAge(0L);
    Thread.sleep(1);
    promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
    verifyGcdLookupAndPromiseFulfillment(3,
                                         domains,
                                         interfaceName1,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1); // 1 global entry
    verify(routingTable, times(3)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
}
/**
 * Verifies participantId lookup with the default LOCAL_AND_GLOBAL scope: a locally
 * registered entry is returned straight from the local store, the routing table
 * reference count is incremented, and no global cache/GCD/routing-table-put
 * interactions occur.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantId_defaultScopelocalAndGlobal_localEntry() throws InterruptedException {
    DiscoveryQos discoveryQos = new DiscoveryQos(Long.MAX_VALUE,
                                                 Long.MAX_VALUE,
                                                 DiscoveryScope.LOCAL_AND_GLOBAL,
                                                 false);
    DiscoveryEntryWithMetaInfo expectedDiscoveryEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                                        expectedDiscoveryEntry);
    // add locally registered entry
    ProviderQos providerQos = new ProviderQos();
    providerQos.setScope(ProviderScope.LOCAL);
    when(localDiscoveryEntryStoreMock.lookup(eq(expectedDiscoveryEntry.getParticipantId()),
                                             eq(discoveryQos.getCacheMaxAge()))).thenReturn(Optional.of(discoveryEntry));
    reset(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(expectedDiscoveryEntry.getParticipantId());
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo retrievedCapabilityEntry = (DiscoveryEntryWithMetaInfo) values[0];
    assertEquals(expectedDiscoveryEntryWithMetaInfo, retrievedCapabilityEntry);
    verify(localDiscoveryEntryStoreMock).lookup(eq(expectedDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
    // local hit: no global cache or GCD client involvement
    verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    verify(routingTable, times(1)).incrementReferenceCount(eq(discoveryEntry.getParticipantId()));
    verify(routingTable, never()).put(eq(discoveryEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
}
/**
 * Lookup by domain/interface with scope LOCAL_AND_GLOBAL, step by step:
 * 1) nothing registered: GCD is queried (1st call), 0 results, routing table untouched;
 * 2) one local entry added: GCD queried again (2nd call, no cached global match),
 *    1 result, the local participant's routing entry only gets its reference count bumped;
 * 3) GCD additionally returns one global entry: 3rd GCD call, 2 results, the global
 *    participant is put into the routing table (isGloballyVisible = true);
 * 4) global entry now served from the cache: GCD call count stays at 3;
 * 5) cacheMaxAge = 0 invalidates the cache: GCD is queried once more (4th call).
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_localAndGlobal() throws InterruptedException {
List<GlobalDiscoveryEntry> globalEntries = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String[] domains = new String[]{ domain1 };
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
// GCD initially answers with an empty list; globalEntries is a live list mutated below
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
0);
// no result -> no routing table interaction at all
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
String localParticipantId = "localParticipant";
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
localParticipantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
when(localDiscoveryEntryStoreMock.lookup(eq(domains),
eq(interfaceName1))).thenReturn(Arrays.asList(discoveryEntry));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(2,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 local entry
// local result: reference count incremented, no routing entry created
verify(routingTable, times(1)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, never()).put(anyString(), any(Address.class), eq(true), anyLong());
// add global entry
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
globalParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
globalEntries.add(capInfo);
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(3,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 1 local, 1 global entry
// global result: routing entry created (globally visible); local entry ref-counted again
verify(routingTable, times(2)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
// now, another lookup call shall take the cached for the global cap call, and no longer call the global cap dir
// (as long as the cache is not expired)
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(Arrays.asList(capInfo));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(3,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 1 local, 1 cached entry
verify(routingTable, times(3)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, times(2)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
// and now, invalidate the existing cached global values, resulting in another call to glocalcapclient
discoveryQos.setCacheMaxAge(0L);
Thread.sleep(1);
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(4,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 1 local, 1 global entry
verify(routingTable, times(4)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, times(3)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
}
/**
 * Lookup by domain/interface on a directory configured with a single empty GBID:
 * the GCD returns two global entries, and for each of them the MqttAddress put into
 * the routing table must carry the configured (empty) GBID as broker URI instead of
 * the GBID information contained in the returned entries.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_emptyGbid_replacesReturnedGbidsWithEmpty() throws InterruptedException {
String[] gbids = new String[]{ "" };
// separate directory instance whose only known backend is the empty GBID
LocalCapabilitiesDirectoryImpl localCapabilitiesDirectoryWithEmptyGbids = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
globalAddressProvider,
localDiscoveryEntryStoreMock,
globalDiscoveryEntryCacheMock,
routingTable,
globalCapabilitiesDirectoryClient,
expiredDiscoveryEntryCacheCleaner,
freshnessUpdateIntervalMs,
capabilitiesFreshnessUpdateExecutor,
shutdownNotifier,
gbids,
DEFAULT_EXPIRY_TIME_MS);
List<GlobalDiscoveryEntry> globalEntries = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String[] domains = new String[]{ domain1 };
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
// add global entries
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
globalParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
globalEntries.add(capInfo);
String globalParticipantId2 = "globalParticipant2";
GlobalDiscoveryEntry capInfo2 = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
globalParticipantId2,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
globalEntries.add(capInfo2);
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(gbids));
// passing an empty GBID array to lookup selects all configured GBIDs (here: the empty one)
Promise<Lookup2Deferred> promise = localCapabilitiesDirectoryWithEmptyGbids.lookup(domains,
interfaceName1,
discoveryQos,
new String[]{});
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
gbids,
promise,
2);
verify(routingTable, never()).incrementReferenceCount(any());
ArgumentCaptor<Address> addressCaptor = ArgumentCaptor.forClass(Address.class);
// both routing entries must use the configured empty GBID as broker URI
verify(routingTable, times(1)).put(eq(globalParticipantId), addressCaptor.capture(), eq(true), anyLong());
MqttAddress address = (MqttAddress) addressCaptor.getValue();
assertEquals(gbids[0], address.getBrokerUri());
verify(routingTable, times(1)).put(eq(globalParticipantId2), addressCaptor.capture(), eq(true), anyLong());
address = (MqttAddress) addressCaptor.getValue();
assertEquals(gbids[0], address.getBrokerUri());
}
/**
 * Lookup by participantId on a directory configured with a single empty GBID:
 * the GCD returns one global entry; the MqttAddress put into the routing table must
 * carry the configured (empty) GBID as broker URI instead of the entry's own GBID.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantId_emptyGbid_replacesReturnedGbidsWithEmpty() throws InterruptedException {
    final String[] emptyGbidArray = new String[]{ "" };
    // directory under test knows only the empty GBID
    LocalCapabilitiesDirectoryImpl directoryWithEmptyGbid = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
                                                                                              globalAddressProvider,
                                                                                              localDiscoveryEntryStoreMock,
                                                                                              globalDiscoveryEntryCacheMock,
                                                                                              routingTable,
                                                                                              globalCapabilitiesDirectoryClient,
                                                                                              expiredDiscoveryEntryCacheCleaner,
                                                                                              freshnessUpdateIntervalMs,
                                                                                              capabilitiesFreshnessUpdateExecutor,
                                                                                              shutdownNotifier,
                                                                                              emptyGbidArray,
                                                                                              DEFAULT_EXPIRY_TIME_MS);

    final String participantId = "globalParticipant";
    final DiscoveryQos qos = new DiscoveryQos(30000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
    // single remote entry answered by the GCD
    GlobalDiscoveryEntry remoteEntry = new GlobalDiscoveryEntry(new Version(47, 11),
                                                                "domain1",
                                                                "interfaceName1",
                                                                participantId,
                                                                new ProviderQos(),
                                                                System.currentTimeMillis(),
                                                                expiryDateMs,
                                                                publicKeyId,
                                                                globalAddress1Serialized);
    doAnswer(createLookupAnswer(remoteEntry)).when(globalCapabilitiesDirectoryClient)
                                             .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                                     eq(participantId),
                                                     eq(qos.getDiscoveryTimeout()),
                                                     eq(emptyGbidArray));

    // empty GBID array passed to lookup selects all configured GBIDs
    Promise<Lookup4Deferred> promise = directoryWithEmptyGbid.lookup(participantId, qos, new String[]{});
    checkPromiseSuccess(promise, "lookup failed");

    verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                                     eq(participantId),
                                                     eq(qos.getDiscoveryTimeout()),
                                                     eq(emptyGbidArray));
    verify(routingTable, never()).incrementReferenceCount(any());
    // the routed address must carry the configured empty GBID as broker URI
    ArgumentCaptor<Address> capturedAddress = ArgumentCaptor.forClass(Address.class);
    verify(routingTable, times(1)).put(eq(participantId), capturedAddress.capture(), eq(true), anyLong());
    assertEquals(emptyGbidArray[0], ((MqttAddress) capturedAddress.getValue()).getBrokerUri());
}
/**
 * GLOBAL_ONLY lookup by domain/interface with an explicit GBID selection: the global
 * cache holds two entries with different addresses (presumably one per backend — the
 * addresses correspond to knownGbids[0]/[1]); only the cached entry matching the
 * selected GBID may be returned and put into the routing table.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_globalOnly_filtersRemoteCachedEntriesByGbids() throws InterruptedException {
String domain = "domain";
String[] domainsForLookup = new String[]{ domain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
// two cached entries, distinguished by participantId and serialized address
GlobalDiscoveryEntry cachedEntryForGbid1 = new GlobalDiscoveryEntry(new Version(47, 11),
domain,
interfaceName,
"participantId1",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
GlobalDiscoveryEntry cachedEntryForGbid2 = new GlobalDiscoveryEntry(cachedEntryForGbid1);
cachedEntryForGbid2.setParticipantId("participantId2");
cachedEntryForGbid2.setAddress(globalAddress2Serialized);
DiscoveryEntryWithMetaInfo expectedEntry1 = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
cachedEntryForGbid1);
DiscoveryEntryWithMetaInfo expectedEntry2 = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
cachedEntryForGbid2);
doReturn(Arrays.asList(cachedEntryForGbid1, cachedEntryForGbid2)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(domainsForLookup),
eq(interfaceName),
eq(discoveryQos.getCacheMaxAge()));
// lookup restricted to the second backend -> only entry 2 is returned and routed
Promise<Lookup2Deferred> promise1 = localCapabilitiesDirectory.lookup(domainsForLookup,
interfaceName,
discoveryQos,
new String[]{ knownGbids[1] });
DiscoveryEntryWithMetaInfo[] result1 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise1,
"lookup failed")[0];
verify(routingTable, times(1)).put(eq(expectedEntry2.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
verify(routingTable, never()).put(eq(expectedEntry1.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
verify(routingTable, never()).incrementReferenceCount(anyString());
assertEquals(1, result1.length);
assertEquals(expectedEntry2, result1[0]);
// reset verification counters before the second lookup
reset((Object) routingTable);
// lookup restricted to the first backend -> only entry 1 is returned and routed
Promise<Lookup2Deferred> promise2 = localCapabilitiesDirectory.lookup(domainsForLookup,
interfaceName,
discoveryQos,
new String[]{ knownGbids[0] });
DiscoveryEntryWithMetaInfo[] result2 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise2,
"lookup failed")[0];
verify(routingTable, times(1)).put(eq(expectedEntry1.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
verify(routingTable, never()).put(eq(expectedEntry2.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
verify(routingTable, never()).incrementReferenceCount(anyString());
assertEquals(1, result2.length);
assertEquals(expectedEntry1, result2[0]);
}
/**
 * GLOBAL_ONLY lookup by domain/interface filtering LOCALLY registered global entries
 * by GBID: entry1 is registered for all backends, entry2 only for knownGbids[1].
 * A lookup for knownGbids[1] or for all GBIDs returns both entries; a lookup for
 * knownGbids[0] returns entry1 only. Local matches are ref-counted in the routing
 * table, never put. Reference counts below accumulate across the three lookups.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_globalOnly_filtersLocalEntriesByGbids() throws InterruptedException {
String domain = "domain";
String[] domainsForLookup = new String[]{ domain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
DiscoveryEntry localEntry1 = new DiscoveryEntry(new Version(47, 11),
domain,
interfaceName,
"participantId1",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
DiscoveryEntryWithMetaInfo expectedEntry1 = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
localEntry1);
DiscoveryEntry localEntry2 = new DiscoveryEntry(localEntry1);
localEntry2.setParticipantId("participantId2");
DiscoveryEntryWithMetaInfo expectedEntry2 = new DiscoveryEntryWithMetaInfo(expectedEntry1);
expectedEntry2.setParticipantId(localEntry2.getParticipantId());
doReturn(Arrays.asList(localEntry1, localEntry2)).when(localDiscoveryEntryStoreMock)
.lookupGlobalEntries(eq(domainsForLookup), eq(interfaceName));
// register entry1 for all backends, entry2 for knownGbids[1] only
Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(localEntry1, true, knownGbids);
checkPromiseSuccess(promiseAdd, "add failed");
promiseAdd = localCapabilitiesDirectory.add(localEntry2, true, new String[]{ knownGbids[1] });
checkPromiseSuccess(promiseAdd, "add failed");
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(localDiscoveryEntryStoreMock).add(eq(localEntry1));
verify(localDiscoveryEntryStoreMock).add(eq(localEntry2));
// lookup knownGbids[1]: both entries expected (result order is not guaranteed)
Promise<Lookup2Deferred> promiseLookup1 = localCapabilitiesDirectory.lookup(domainsForLookup,
interfaceName,
discoveryQos,
new String[]{ knownGbids[1] });
DiscoveryEntryWithMetaInfo[] result1 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup1,
"lookup failed")[0];
assertEquals(2, result1.length);
int actualEntry1 = expectedEntry1.getParticipantId().equals(result1[0].getParticipantId()) ? 0 : 1;
int actualEntry2 = (actualEntry1 + 1) % 2;
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry1, result1[actualEntry1]));
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry2, result1[actualEntry2]));
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedEntry1.getParticipantId()));
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedEntry2.getParticipantId()));
// lookup knownGbids[0]: only entry1 expected
Promise<Lookup2Deferred> promiseLookup2 = localCapabilitiesDirectory.lookup(domainsForLookup,
interfaceName,
discoveryQos,
new String[]{ knownGbids[0] });
DiscoveryEntryWithMetaInfo[] result2 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup2,
"lookup failed")[0];
assertEquals(1, result2.length);
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry1, result2[0]));
verify(routingTable, times(2)).incrementReferenceCount(eq(expectedEntry1.getParticipantId()));
// lookup all known GBIDs: both entries expected again
Promise<Lookup2Deferred> promiseLookup3 = localCapabilitiesDirectory.lookup(domainsForLookup,
interfaceName,
discoveryQos,
knownGbids);
DiscoveryEntryWithMetaInfo[] result3 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup3,
"lookup failed")[0];
assertEquals(2, result3.length);
actualEntry1 = expectedEntry1.getParticipantId().equals(result3[0].getParticipantId()) ? 0 : 1;
actualEntry2 = (actualEntry1 + 1) % 2;
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry1, result3[actualEntry1]));
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry2, result3[actualEntry2]));
verify(routingTable, times(3)).incrementReferenceCount(eq(expectedEntry1.getParticipantId()));
verify(routingTable, times(2)).incrementReferenceCount(eq(expectedEntry2.getParticipantId()));
// local results never create routing entries
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * GLOBAL_ONLY lookup by participantId filtering a LOCALLY registered global entry by
 * GBID: the entry is registered for knownGbids[1] only. Lookup for knownGbids[1] or
 * for all GBIDs returns the local entry (ref-counted, never put); lookup for
 * knownGbids[0] fails with NO_ENTRY_FOR_SELECTED_BACKENDS. Neither the global cache
 * nor the GCD client is ever consulted.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_globalOnly_filtersLocalEntriesByGbids() throws InterruptedException {
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
DiscoveryEntry localEntry = new DiscoveryEntry(new Version(47, 11),
"domain",
"interfaceName",
"participantId1",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
DiscoveryEntry localStoreEntry = new DiscoveryEntry(localEntry);
DiscoveryEntryWithMetaInfo expectedLocalEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
localEntry);
// register in knownGbids[1]
Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(localEntry,
true,
new String[]{ knownGbids[1] });
checkPromiseSuccess(promiseAdd, "add failed");
// clear interactions from the add so the lookup verifications start from zero
reset((Object) localDiscoveryEntryStoreMock,
(Object) globalDiscoveryEntryCacheMock,
(Object) globalCapabilitiesDirectoryClient);
doReturn(Optional.of(localStoreEntry)).when(localDiscoveryEntryStoreMock)
.lookup(eq(expectedLocalEntry.getParticipantId()), eq(Long.MAX_VALUE));
// lookup knownGbids[1], expect local entry
Promise<Lookup4Deferred> promiseLookup1 = localCapabilitiesDirectory.lookup(expectedLocalEntry.getParticipantId(),
discoveryQos,
new String[]{ knownGbids[1] });
DiscoveryEntryWithMetaInfo result1 = (DiscoveryEntryWithMetaInfo) checkPromiseSuccess(promiseLookup1,
"lookup failed")[0];
verify(localDiscoveryEntryStoreMock, times(1)).lookup(eq(expectedLocalEntry.getParticipantId()),
eq(Long.MAX_VALUE));
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
assertEquals(expectedLocalEntry, result1);
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalEntry.getParticipantId()));
// lookup knownGbids[0], expect DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS
Promise<Lookup4Deferred> promiseLookup2 = localCapabilitiesDirectory.lookup(expectedLocalEntry.getParticipantId(),
discoveryQos,
new String[]{ knownGbids[0] });
checkPromiseError(promiseLookup2, DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
verify(localDiscoveryEntryStoreMock, times(2)).lookup(eq(expectedLocalEntry.getParticipantId()),
eq(Long.MAX_VALUE));
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
// failed lookup must not touch the reference count
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalEntry.getParticipantId()));
// lookup all gbids, expect local entry
Promise<Lookup4Deferred> promiseLookup3 = localCapabilitiesDirectory.lookup(expectedLocalEntry.getParticipantId(),
discoveryQos,
knownGbids);
DiscoveryEntryWithMetaInfo result3 = (DiscoveryEntryWithMetaInfo) checkPromiseSuccess(promiseLookup3,
"lookup failed")[0];
verify(localDiscoveryEntryStoreMock, times(3)).lookup(eq(expectedLocalEntry.getParticipantId()),
eq(Long.MAX_VALUE));
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
assertEquals(expectedLocalEntry, result3);
verify(routingTable, times(2)).incrementReferenceCount(eq(expectedLocalEntry.getParticipantId()));
// local results never create routing entries
verify(routingTable, never()).put(eq(expectedLocalEntry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
}
/**
 * Helper: registers four global providers locally (one per known backend plus one for
 * backends 2 and 3), performs a GLOBAL_ONLY domain/interface lookup restricted to
 * {@code gbidsForLookup} and checks that exactly {@code expectedParticipantIds} are
 * returned, each ref-counted in the routing table (no put), with no GCD interaction.
 * NOTE(review): callers name this scenario "allCached" although the entries are
 * served from the local discovery entry store, not the global cache.
 */
private void testLookupByDomainInterfaceWithGbids_globalOnly_allLocal(String[] gbidsForLookup,
DiscoveryEntry entryForGbid1,
DiscoveryEntry entryForGbid2,
DiscoveryEntry entryForGbid3,
DiscoveryEntry entryForGbid2And3,
Set<String> expectedParticipantIds) throws InterruptedException {
String[] domainsForLookup = new String[]{ discoveryEntry.getDomain() };
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
final boolean awaitGlobalRegistration = true;
// register each entry for its backend(s)
Promise<Add1Deferred> promise1 = localCapabilitiesDirectory.add(entryForGbid1,
awaitGlobalRegistration,
new String[]{ knownGbids[0] });
checkPromiseSuccess(promise1, "add failed");
Promise<Add1Deferred> promise2 = localCapabilitiesDirectory.add(entryForGbid2,
awaitGlobalRegistration,
new String[]{ knownGbids[1] });
checkPromiseSuccess(promise2, "add failed");
Promise<Add1Deferred> promise3 = localCapabilitiesDirectory.add(entryForGbid3,
awaitGlobalRegistration,
new String[]{ knownGbids[2] });
checkPromiseSuccess(promise3, "add failed");
Promise<Add1Deferred> promise4 = localCapabilitiesDirectory.add(entryForGbid2And3,
awaitGlobalRegistration,
new String[]{ knownGbids[1], knownGbids[2] });
checkPromiseSuccess(promise4, "add failed");
// local store serves all four entries; global cache is empty
doReturn(Arrays.asList(entryForGbid1,
entryForGbid2,
entryForGbid3,
entryForGbid2And3)).when(localDiscoveryEntryStoreMock)
.lookupGlobalEntries(eq(domainsForLookup), eq(INTERFACE_NAME));
doReturn(new ArrayList<GlobalDiscoveryEntry>()).when(globalDiscoveryEntryCacheMock)
.lookup(eq(domainsForLookup), eq(INTERFACE_NAME), anyLong());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domainsForLookup,
INTERFACE_NAME,
discoveryQos,
gbidsForLookup);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] foundEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
assertEquals(expectedParticipantIds.size(), foundEntries.length);
Set<String> foundParticipantIds = new HashSet<>();
for (DiscoveryEntryWithMetaInfo foundEntry : foundEntries) {
foundParticipantIds.add(foundEntry.getParticipantId());
}
assertEquals(expectedParticipantIds, foundParticipantIds);
// local results are ref-counted only, never put
expectedParticipantIds.forEach((participantId) -> {
verify(routingTable, times(1)).incrementReferenceCount(eq(participantId));
verify(routingTable, never()).put(eq(participantId), any(Address.class), any(Boolean.class), anyLong());
});
// reset so that callers can invoke this helper repeatedly
reset((Object) routingTable);
// the GCD must never have been contacted
verify(globalCapabilitiesDirectoryClient,
times(0)).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
any(String[].class),
anyString(),
anyLong(),
any(String[].class));
}
/**
 * Helper: with no local or cached entries, a GLOBAL_ONLY domain/interface lookup
 * restricted to {@code gbidsForLookup} must query the GCD with {@code expectedGbids},
 * return both remote entries and create a routing-table entry for each
 * (no reference counting for remote results).
 */
private void testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(String[] gbidsForLookup,
String[] expectedGbids) throws InterruptedException {
String[] domainsForLookup = new String[]{ discoveryEntry.getDomain() };
String[] expectedDomains = domainsForLookup.clone();
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
// two remote entries answered by the GCD
List<GlobalDiscoveryEntry> globalEntries = new ArrayList<>();
globalEntries.add(globalDiscoveryEntry);
DiscoveryEntry entry2 = new DiscoveryEntry(discoveryEntry);
entry2.setParticipantId("participantId2");
globalEntries.add(CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(entry2, globalAddressWithoutGbid));
// cache is empty -> lookup has to go remote
doReturn(new ArrayList<GlobalDiscoveryEntry>()).when(globalDiscoveryEntryCacheMock)
.lookup(eq(expectedDomains), eq(INTERFACE_NAME), anyLong());
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(expectedDomains),
eq(INTERFACE_NAME),
eq(discoveryQos.getDiscoveryTimeout()),
eq(expectedGbids));
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domainsForLookup,
INTERFACE_NAME,
discoveryQos,
gbidsForLookup);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(expectedDomains),
eq(INTERFACE_NAME),
eq(discoveryQos.getDiscoveryTimeout()),
eq(expectedGbids));
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] foundEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
assertEquals(2, foundEntries.length);
// remote results create routing entries, no reference counting
Arrays.asList(foundEntries)
.forEach((entry) -> verify(routingTable, times(1)).put(eq(entry.getParticipantId()),
any(Address.class),
eq(true),
anyLong()));
verify(routingTable, never()).incrementReferenceCount(anyString());
// reset so that callers can invoke this helper repeatedly
reset((Object) routingTable);
}
/**
 * GLOBAL_ONLY domain/interface lookup restricted to backends 1 and 3: only the
 * providers registered for those backends (entries 1, 3 and the 2+3 entry) may be
 * returned; the entry registered for backend 2 only must be filtered out.
 */
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_multipleGbids_allCached() throws InterruptedException {
    final String[] selectedGbids = { knownGbids[0], knownGbids[2] };

    DiscoveryEntry firstEntry = new DiscoveryEntry(discoveryEntry);
    DiscoveryEntry secondEntry = new DiscoveryEntry(discoveryEntry);
    secondEntry.setParticipantId("participantId2");
    DiscoveryEntry thirdEntry = new DiscoveryEntry(discoveryEntry);
    thirdEntry.setParticipantId("participantId3");
    DiscoveryEntry multiGbidEntry = new DiscoveryEntry(discoveryEntry);
    multiGbidEntry.setParticipantId("participantId4");

    // secondEntry (backend 2 only) is expected to be filtered out
    Set<String> expectedParticipantIds = new HashSet<>(Arrays.asList(firstEntry.getParticipantId(),
                                                                     thirdEntry.getParticipantId(),
                                                                     multiGbidEntry.getParticipantId()));

    testLookupByDomainInterfaceWithGbids_globalOnly_allLocal(selectedGbids,
                                                             firstEntry,
                                                             secondEntry,
                                                             thirdEntry,
                                                             multiGbidEntry,
                                                             expectedParticipantIds);
}
/**
 * GLOBAL_ONLY domain/interface lookup with an empty GBID selection: all backends are
 * selected, so providers registered for any backend are expected in the result.
 */
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_emptyGbidsArray_allCached() throws InterruptedException {
    DiscoveryEntry firstEntry = new DiscoveryEntry(discoveryEntry);
    DiscoveryEntry secondEntry = new DiscoveryEntry(discoveryEntry);
    secondEntry.setParticipantId("participantId2");
    DiscoveryEntry thirdEntry = new DiscoveryEntry(discoveryEntry);
    thirdEntry.setParticipantId("participantId3");
    DiscoveryEntry multiGbidEntry = new DiscoveryEntry(discoveryEntry);
    multiGbidEntry.setParticipantId("participantId4");

    // empty GBID selection -> no filtering, all four providers expected
    Set<String> expectedParticipantIds = new HashSet<>(Arrays.asList(firstEntry.getParticipantId(),
                                                                     secondEntry.getParticipantId(),
                                                                     thirdEntry.getParticipantId(),
                                                                     multiGbidEntry.getParticipantId()));

    testLookupByDomainInterfaceWithGbids_globalOnly_allLocal(new String[0],
                                                             firstEntry,
                                                             secondEntry,
                                                             thirdEntry,
                                                             multiGbidEntry,
                                                             expectedParticipantIds);
}
/**
 * GLOBAL_ONLY domain/interface lookup with nothing local or cached: the selected
 * GBIDs are forwarded to the GCD as-is, regardless of their order.
 */
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_multipleGbids_noneCached() throws InterruptedException {
    String[] firstOrder = { knownGbids[0], knownGbids[2] };
    testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(firstOrder, firstOrder.clone());

    // reversed selection must be forwarded in that same reversed order
    String[] reversedOrder = { knownGbids[2], knownGbids[0] };
    testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(reversedOrder, reversedOrder.clone());
}
/**
 * GLOBAL_ONLY domain/interface lookup with an empty GBID selection and nothing local
 * or cached: the GCD must be queried with all known GBIDs.
 */
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_emptyGbidsArray_noneCached() throws InterruptedException {
    testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(new String[0], knownGbids);
}
/**
 * Helper: GLOBAL_ONLY lookup by participantId served from the global cache:
 * no GCD call is made, the cached entry is returned and put into the routing table
 * (globally visible), without reference counting.
 */
private void testLookupByParticipantIdWithGbids_globalOnly_allCached(String[] gbidsForLookup) throws InterruptedException {
String participantId = discoveryEntry.getParticipantId();
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
// cache hit for the participantId
doReturn(Optional.of(globalDiscoveryEntry)).when(globalDiscoveryEntryCacheMock).lookup(eq(participantId),
anyLong());
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
discoveryQos,
gbidsForLookup);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo foundEntry = (DiscoveryEntryWithMetaInfo) values[0];
assertEquals(participantId, foundEntry.getParticipantId());
// cached (remote) result: routing entry created, no ref-counting
verify(routingTable, never()).incrementReferenceCount(eq(globalDiscoveryEntry.getParticipantId()));
verify(routingTable, times(1)).put(eq(globalDiscoveryEntry.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
// reset so that callers can invoke this helper repeatedly
reset((Object) routingTable);
// the GCD must never have been contacted
verify(globalCapabilitiesDirectoryClient,
times(0)).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
anyString(),
anyLong(),
any(String[].class));
}
/**
 * Helper: GLOBAL_ONLY lookup by participantId with an empty cache: the GCD is queried
 * with {@code expectedGbids}, the remote entry is returned and a routing-table entry
 * is created for it (no reference counting).
 */
private void testLookupByParticipantIdWithGbids_globalOnly_noneCached(String[] gbidsForLookup,
String[] expectedGbids) throws InterruptedException {
String participantId = discoveryEntry.getParticipantId();
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
// cache miss -> lookup has to go remote
doReturn(Optional.empty()).when(globalDiscoveryEntryCacheMock).lookup(eq(participantId), anyLong());
doAnswer(createLookupAnswer(globalDiscoveryEntry)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
eq(discoveryQos.getDiscoveryTimeout()),
eq(expectedGbids));
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
discoveryQos,
gbidsForLookup);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
eq(discoveryQos.getDiscoveryTimeout()),
eq(expectedGbids));
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo foundEntry = (DiscoveryEntryWithMetaInfo) values[0];
assertEquals(participantId, foundEntry.getParticipantId());
// remote result: routing entry created, no ref-counting
verify(routingTable, times(1)).put(eq(foundEntry.getParticipantId()), any(Address.class), eq(true), anyLong());
verify(routingTable, never()).incrementReferenceCount(anyString());
// reset so that callers can invoke this helper repeatedly
reset((Object) routingTable);
}
/**
 * GLOBAL_ONLY participantId lookup restricted to backends 1 and 3, served from the
 * global cache (no GCD call expected).
 */
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_multipleGbids_allCached() throws InterruptedException {
    testLookupByParticipantIdWithGbids_globalOnly_allCached(new String[]{ knownGbids[0], knownGbids[2] });
}
/**
 * GLOBAL_ONLY participantId lookup with an empty GBID selection, served from the
 * global cache (no GCD call expected).
 */
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_emptyGbidsArray_allCached() throws InterruptedException {
    testLookupByParticipantIdWithGbids_globalOnly_allCached(new String[0]);
}
/**
 * GLOBAL_ONLY participantId lookup with an empty cache: the selected GBIDs are
 * forwarded to the GCD as-is, regardless of their order.
 */
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_multipleGbids_noneCached() throws InterruptedException {
    String[] firstOrder = { knownGbids[0], knownGbids[2] };
    testLookupByParticipantIdWithGbids_globalOnly_noneCached(firstOrder, firstOrder.clone());

    // reversed selection must be forwarded in that same reversed order
    String[] reversedOrder = { knownGbids[2], knownGbids[0] };
    testLookupByParticipantIdWithGbids_globalOnly_noneCached(reversedOrder, reversedOrder.clone());
}
/**
 * GLOBAL_ONLY participantId lookup with an empty GBID selection and an empty cache:
 * the GCD must be queried with all known GBIDs.
 */
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_emptyGbidsArray_noneCached() throws InterruptedException {
    testLookupByParticipantIdWithGbids_globalOnly_noneCached(new String[0], knownGbids);
}
/**
 * Multi-domain lookup with LOCAL_ONLY scope: both locally registered entries (one per
 * domain) are returned; each result only gets its routing-table reference count
 * incremented — no new routing entry is created.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_localOnly() throws InterruptedException {
    String[] domains = new String[]{ "domain1", "domain2" };
    String interfaceName = "interface1";
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_ONLY);
    // One local entry per domain. Fix: the last constructor argument is the public key
    // id (publicKeyId, as in all sibling tests), not the interface name.
    Collection<DiscoveryEntry> entries = Arrays.asList(new DiscoveryEntry(new Version(0, 0),
                                                                          "domain1",
                                                                          interfaceName,
                                                                          "participantId1",
                                                                          new ProviderQos(),
                                                                          System.currentTimeMillis(),
                                                                          expiryDateMs,
                                                                          publicKeyId),
                                                       new DiscoveryEntry(new Version(0, 0),
                                                                          "domain2",
                                                                          interfaceName,
                                                                          "participantId2",
                                                                          new ProviderQos(),
                                                                          System.currentTimeMillis(),
                                                                          expiryDateMs,
                                                                          publicKeyId));
    when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(entries);
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    Object[] values = checkPromiseSuccess(promise, "lookup failed");
    assertEquals(2, ((DiscoveryEntryWithMetaInfo[]) values[0]).length);
    // local results must only increment the reference count, never create routing entries
    entries.forEach((entry) -> {
        verify(routingTable, times(1)).incrementReferenceCount(eq(entry.getParticipantId()));
        verify(routingTable, never()).put(eq(entry.getParticipantId()),
                                          any(Address.class),
                                          any(Boolean.class),
                                          anyLong());
    });
}
/**
 * Multi-domain GLOBAL_ONLY lookup with neither local nor cached entries: the GCD is
 * queried once (domain order-insensitive matcher) and returns nothing, so the promise
 * is fulfilled with 0 entries and the routing table stays untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_noneCached() throws InterruptedException {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
// empty cache forces a remote lookup
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
eq(interfaceName),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(new ArrayList<GlobalDiscoveryEntry>());
// GCD stubbed with an order-insensitive matcher for the domain array
doAnswer(createLookupAnswer(new ArrayList<GlobalDiscoveryEntry>())).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
eq(interfaceName),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
0);
// no result -> no routing table interaction at all
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Multi-domain GLOBAL_ONLY lookup where the global cache already holds one entry per
 * domain: the GCD is not contacted (0 expected invocations), both cached entries are
 * returned, and each gets a routing-table entry (no reference counting for cached
 * remote results).
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_allCached() throws InterruptedException {
    String[] domains = new String[]{ "domain1", "domain2" };
    String interfaceName = "interface1";
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);

    // one cached global entry per domain
    List<GlobalDiscoveryEntry> cachedEntries = new ArrayList<>();
    for (String domain : domains) {
        GlobalDiscoveryEntry cachedEntry = new GlobalDiscoveryEntry();
        cachedEntry.setParticipantId("participantIdFor-" + domain);
        cachedEntry.setDomain(domain);
        cachedEntry.setAddress(globalAddress1Serialized);
        cachedEntries.add(cachedEntry);
    }
    when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
                                              eq(interfaceName),
                                              eq(discoveryQos.getCacheMaxAge()))).thenReturn(cachedEntries);

    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);

    // 0 GCD invocations expected, 2 cached entries in the result
    verifyGcdLookupAndPromiseFulfillment(0,
                                         domains,
                                         interfaceName,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         2);
    for (GlobalDiscoveryEntry cachedEntry : cachedEntries) {
        verify(routingTable, never()).incrementReferenceCount(eq(cachedEntry.getParticipantId()));
        verify(routingTable, times(1)).put(eq(cachedEntry.getParticipantId()),
                                           any(Address.class),
                                           eq(true),
                                           anyLong());
    }
}
/**
 * Multi-domain GLOBAL_ONLY lookup where all providers are registered locally as
 * global entries (within cacheMaxAge): the lookup is served from the local store
 * without any GCD call; results are ref-counted in the routing table and no new
 * routing entries are created.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_allLocalGlobal() throws InterruptedException {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
// register one globally visible provider per domain
List<DiscoveryEntry> entries = new ArrayList<>();
List<Promise<Add1Deferred>> promises = new ArrayList<>();
for (String domain : domains) {
DiscoveryEntry entry = new GlobalDiscoveryEntry();
entry.setParticipantId("participantIdFor-" + domain);
entry.setDomain(domain);
entries.add(entry);
promises.add(localCapabilitiesDirectory.add(entry, true, knownGbids));
}
promises.forEach(promise -> {
try {
checkPromiseSuccess(promise, "addFailed");
} catch (InterruptedException e) {
fail("add failed: " + e);
}
});
doReturn(entries).when(localDiscoveryEntryStoreMock).lookupGlobalEntries(eq(domains), eq(interfaceName));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(0,
domains,
interfaceName,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 0 GCD calls, 2 entries served from the local store
// local results are ref-counted only, never put
entries.forEach((entry) -> {
verify(routingTable, times(1)).incrementReferenceCount(eq(entry.getParticipantId()));
verify(routingTable, never()).put(eq(entry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
});
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_oneCached() throws InterruptedException {
    // GLOBAL_ONLY lookup over two domains: one is resolvable from the global cache,
    // the other triggers a GCD lookup that yields no further entries.
    final String[] lookupDomains = { "domain1", "domain2" };
    final String lookupInterface = "interface1";
    DiscoveryQos qos = new DiscoveryQos();
    qos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
    qos.setCacheMaxAge(ONE_DAY_IN_MS);

    GlobalDiscoveryEntry cachedEntry = new GlobalDiscoveryEntry();
    cachedEntry.setParticipantId("participantId1");
    cachedEntry.setInterfaceName(lookupInterface);
    cachedEntry.setDomain(lookupDomains[0]);
    cachedEntry.setAddress(globalAddress1Serialized);
    doReturn(Arrays.asList(cachedEntry)).when(globalDiscoveryEntryCacheMock)
                                        .lookup(eq(lookupDomains), eq(lookupInterface), eq(qos.getCacheMaxAge()));
    doReturn(Optional.of(cachedEntry)).when(globalDiscoveryEntryCacheMock)
                                      .lookup(eq(cachedEntry.getParticipantId()), eq(Long.MAX_VALUE));
    doAnswer(createLookupAnswer(new ArrayList<GlobalDiscoveryEntry>())).when(globalCapabilitiesDirectoryClient)
                                                                      .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                                              eq(lookupDomains),
                                                                              eq(lookupInterface),
                                                                              eq(qos.getDiscoveryTimeout()),
                                                                              eq(knownGbids));

    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(lookupDomains, lookupInterface, qos);

    // 1 expected GCD invocation (for the uncached domain), 1 entry in the result.
    verifyGcdLookupAndPromiseFulfillment(1,
                                         lookupDomains,
                                         lookupInterface,
                                         qos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1);
    verify(routingTable, never()).incrementReferenceCount(anyString());
    verify(routingTable, times(1)).put(eq(cachedEntry.getParticipantId()), any(Address.class), eq(true), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupMultipleDomains_localThenGlobal_oneLocalGlobalOneCached_sameParticipantIdsRemote() throws InterruptedException {
    // LOCAL_THEN_GLOBAL lookup over three domains where the remote (GCD) result
    // contains duplicates of entries already known locally / from the cache:
    // - localDomain: served by a local provider (participantId1)
    // - cachedDomain: served from the global cache (participantId2)
    // - remoteDomain: GCD entries reusing participantId1 and participantId2 plus a new participantId3
    // Expectation: the remote duplicate of the local provider (remoteEntry1) is dropped,
    // the other remote entries are cached and routed.
    String localDomain = "localDomain";
    String cachedDomain = "cachedDomain";
    String remoteDomain = "remoteDomain";
    String[] domains = new String[]{ localDomain, cachedDomain, remoteDomain };
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);

    // local entry for participantId1 and localDomain
    discoveryEntry.setParticipantId("participantId1");
    discoveryEntry.setDomain(localDomain);
    doReturn(Arrays.asList(discoveryEntry)).when(localDiscoveryEntryStoreMock)
                                           .lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                   eq(INTERFACE_NAME));
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
                                         .lookup(eq(discoveryEntry.getParticipantId()), anyLong());

    // cached entry for participantId2 for cachedDomain
    GlobalDiscoveryEntry cachedRemoteEntry = new GlobalDiscoveryEntry();
    cachedRemoteEntry.setParticipantId("participantId2");
    cachedRemoteEntry.setInterfaceName(INTERFACE_NAME);
    cachedRemoteEntry.setDomain(cachedDomain);
    cachedRemoteEntry.setAddress(globalAddress1Serialized);
    doReturn(Arrays.asList(cachedRemoteEntry)).when(globalDiscoveryEntryCacheMock)
                                              .lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                      eq(INTERFACE_NAME),
                                                      eq(discoveryQos.getCacheMaxAge()));

    // remote entries for local provider and for remoteDomain for participantIds 2 and 3
    GlobalDiscoveryEntry remoteEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry,
                                                                                            globalAddressWithoutGbid);
    remoteEntry1.setDomain(remoteDomain);
    GlobalDiscoveryEntry remoteEntry2 = new GlobalDiscoveryEntry(cachedRemoteEntry);
    remoteEntry2.setDomain(remoteDomain);
    remoteEntry2.setAddress(globalAddressWithoutGbidSerialized);
    GlobalDiscoveryEntry remoteEntry3 = new GlobalDiscoveryEntry(cachedRemoteEntry);
    remoteEntry3.setParticipantId("participantId3");
    remoteEntry3.setDomain(remoteDomain);
    remoteEntry3.setAddress(globalAddressWithoutGbidSerialized);
    doAnswer(createLookupAnswer(Arrays.asList(remoteEntry1,
                                              remoteEntry2,
                                              remoteEntry3))).when(globalCapabilitiesDirectoryClient)
                                                             .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                                     eq(domains),
                                                                     eq(INTERFACE_NAME),
                                                                     eq(discoveryQos.getDiscoveryTimeout()),
                                                                     eq(knownGbids));

    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, INTERFACE_NAME, discoveryQos);

    verify(localDiscoveryEntryStoreMock).lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                eq(INTERFACE_NAME));
    verify(globalDiscoveryEntryCacheMock).lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                 eq(INTERFACE_NAME),
                                                 eq(discoveryQos.getCacheMaxAge()));
    verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                     eq(domains),
                                                     eq(INTERFACE_NAME),
                                                     eq(discoveryQos.getDiscoveryTimeout()),
                                                     eq(knownGbids));
    Object[] values = verifyGcdLookupAndPromiseFulfillment(1,
                                                           domains,
                                                           INTERFACE_NAME,
                                                           discoveryQos.getDiscoveryTimeout(),
                                                           knownGbids,
                                                           promise,
                                                           3);
    DiscoveryEntryWithMetaInfo[] result = (DiscoveryEntryWithMetaInfo[]) values[0];
    assertEquals(3, result.length);
    boolean discoveryEntryFound = false;
    boolean remoteEntry2Found = false;
    boolean remoteEntry3Found = false;
    for (DiscoveryEntryWithMetaInfo entry : result) {
        // Fix: compare participantIds with equals() instead of reference equality (==);
        // String identity is an implementation detail and must not be relied upon.
        if (entry.getParticipantId().equals(discoveryEntry.getParticipantId()) && entry.getDomain().equals(localDomain)
                && entry.getIsLocal()) {
            discoveryEntryFound = true;
        }
        if (entry.getParticipantId().equals(remoteEntry2.getParticipantId()) && entry.getDomain().equals(remoteDomain)
                && !entry.getIsLocal()) {
            remoteEntry2Found = true;
        }
        if (entry.getParticipantId().equals(remoteEntry3.getParticipantId()) && entry.getDomain().equals(remoteDomain)
                && !entry.getIsLocal()) {
            remoteEntry3Found = true;
        }
    }
    // The remote duplicate of the local provider must neither be cached nor routed again;
    // its existing routing entry only gets its reference count incremented.
    verify(globalDiscoveryEntryCacheMock, never()).add(remoteEntry1);
    verify(globalDiscoveryEntryCacheMock).add(remoteEntry2);
    verify(globalDiscoveryEntryCacheMock).add(remoteEntry3);
    verify(routingTable, never()).put(eq(remoteEntry1.getParticipantId()),
                                      eq(globalAddressWithoutGbid),
                                      eq(true),
                                      anyLong());
    verify(routingTable, times(1)).incrementReferenceCount(eq(remoteEntry1.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(remoteEntry2.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(remoteEntry3.getParticipantId()));
    verify(routingTable).put(eq(remoteEntry2.getParticipantId()),
                             eq(globalAddressWithoutGbid),
                             eq(true),
                             anyLong());
    verify(routingTable).put(eq(remoteEntry3.getParticipantId()),
                             eq(globalAddressWithoutGbid),
                             eq(true),
                             anyLong());
    assertTrue(discoveryEntryFound && remoteEntry2Found && remoteEntry3Found);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceGbids_localAndGlobal_localGlobalEntry_invokesGcd_filtersCombinedResult() throws Exception {
// LOCAL_AND_GLOBAL lookup for a provider known both locally and at the GCD:
// the GCD is still queried, but the duplicate global entry must be filtered
// out of the combined result in favour of the local one.
String[] domains = { discoveryEntry.getDomain() };
List<DiscoveryEntry> localDiscoveryEntries = Arrays.asList(discoveryEntry);
List<GlobalDiscoveryEntry> globalDiscoveryEntries = Arrays.asList(globalDiscoveryEntry);
final long cacheMaxAge = 10000L;
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge,
discoveryTimeout,
DiscoveryScope.LOCAL_AND_GLOBAL,
false);
// local store returns the local entry; the GCD answers with the duplicate global entry
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(INTERFACE_NAME))).thenReturn(localDiscoveryEntries);
doAnswer(createLookupAnswer(globalDiscoveryEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
Mockito.<String[]> any(),
anyString(),
anyLong(),
any());
doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
.lookup(eq(discoveryEntry.getParticipantId()), anyLong());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
INTERFACE_NAME,
discoveryQos,
new String[0]);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
// only the local variant of the provider may remain in the result
assertEquals(1, capturedDiscoveryEntries.length);
assertTrue(capturedDiscoveryEntries[0].getIsLocal());
verify(globalCapabilitiesDirectoryClient).lookup(any(),
eq(domains),
eq(INTERFACE_NAME),
eq(discoveryTimeout),
any());
// the duplicate is neither cached nor routed; the local entry's routing table
// reference count is incremented instead
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(routingTable, times(1)).incrementReferenceCount(eq(capturedDiscoveryEntries[0].getParticipantId()));
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
verify(localDiscoveryEntryStoreMock).lookup(eq(domains), eq(INTERFACE_NAME));
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry() throws Exception {
// test assumes that the local entry is registered after the global lookup has been triggered
// GLOBAL_ONLY lookup: the GCD returns an entry whose provider has meanwhile been
// registered locally; the local entry must win and the global duplicate be ignored.
String[] domains = { discoveryEntry.getDomain() };
List<GlobalDiscoveryEntry> globalDiscoveryEntries = Arrays.asList(globalDiscoveryEntry);
final long cacheMaxAge = 10000L;
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
doAnswer(createLookupAnswer(globalDiscoveryEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
Mockito.<String[]> any(),
anyString(),
anyLong(),
any());
// by the time the result is processed, the provider is found in the local store
doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
.lookup(eq(discoveryEntry.getParticipantId()), anyLong());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
INTERFACE_NAME,
discoveryQos,
new String[0]);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
// the single returned entry is the local one, flagged isLocal=true
assertEquals(1, capturedDiscoveryEntries.length);
assertEquals(CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true, discoveryEntry),
capturedDiscoveryEntries[0]);
verify(globalCapabilitiesDirectoryClient).lookup(any(),
eq(domains),
eq(INTERFACE_NAME),
eq(discoveryTimeout),
any());
// duplicate is not cached or routed; only the reference count is incremented
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(routingTable, times(1)).incrementReferenceCount(eq(capturedDiscoveryEntries[0].getParticipantId()));
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
verify(localDiscoveryEntryStoreMock).lookup(eq(discoveryEntry.getParticipantId()), anyLong());
}
@Test
public void lookupDomIntf_globalOnlyWithCache_localGlobalEntryNoCachedEntry_doesNotInvokeGcd() throws Exception {
    // GLOBAL_ONLY lookup with caching enabled (cacheMaxAge > 0): a locally registered
    // global provider satisfies the lookup, so the GCD must not be contacted.
    final long cacheMaxAge = 1L;
    final long discoveryTimeout = 5000L;
    final String[] domains = new String[]{ discoveryEntry.getDomain() };
    final String interfaceName = discoveryEntry.getInterfaceName();
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
    // register in all gbids
    Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
    checkPromiseSuccess(promiseAdd, "add failed");
    // drop interactions recorded during add() so only the lookup is verified below
    reset(localDiscoveryEntryStoreMock, globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    doReturn(new HashSet<>(Arrays.asList(discoveryEntry))).when(localDiscoveryEntryStoreMock)
                                                          .lookupGlobalEntries(eq(domains), eq(interfaceName));

    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);

    DiscoveryEntryWithMetaInfo[] values = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise,
                                                                                             "lookup failed")[0];
    assertEquals(1, values.length);
    // Fix: assertTrue instead of assertEquals(true, ...) for a boolean condition —
    // clearer intent and a more useful failure message.
    assertTrue(values[0].getIsLocal());
    verify(routingTable, times(1)).incrementReferenceCount(eq(discoveryEntry.getParticipantId()));
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
}
@Test
public void lookupDomIntf_globalOnlyNoCache_localGlobalEntryNoCachedEntry_invokesGcd() throws Exception {
    // GLOBAL_ONLY lookup with caching disabled (cacheMaxAge == 0): even a locally
    // registered global provider must not short-circuit the lookup — the GCD is queried.
    final long cacheMaxAge = 0L;
    final long discoveryTimeout = 5000L;
    final String[] lookupDomains = new String[]{ discoveryEntry.getDomain() };
    final String lookupInterface = discoveryEntry.getInterfaceName();
    DiscoveryQos qos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);

    // register in all gbids
    Promise<Add1Deferred> addPromise = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
    checkPromiseSuccess(addPromise, "add failed");
    // forget the interactions caused by add() before verifying the lookup
    reset(localDiscoveryEntryStoreMock, globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    doAnswer(createLookupAnswer(new ArrayList<GlobalDiscoveryEntry>())).when(globalCapabilitiesDirectoryClient)
                                                                      .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                                              Mockito.<String[]> any(),
                                                                              anyString(),
                                                                              anyLong(),
                                                                              any());

    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(lookupDomains, lookupInterface, qos);

    DiscoveryEntryWithMetaInfo[] result = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise,
                                                                                             "lookup failed")[0];
    assertEquals(0, result.length);
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
    verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                     eq(lookupDomains),
                                                     eq(lookupInterface),
                                                     eq(qos.getDiscoveryTimeout()),
                                                     eq(knownGbids));
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdGbids_globalOnlyWithCache_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry() throws Exception {
    // test assumes that the local entry is registered after the global lookup has been triggered
    final long cacheMaxAge = 10000L; // caching enabled
    lookupByParticipantIdGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry(cacheMaxAge);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdGbids_globalOnlyNoCache_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry() throws Exception {
    // test assumes that the local entry is registered after the global lookup has been triggered
    final long cacheMaxAge = 0L; // caching disabled
    lookupByParticipantIdGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry(cacheMaxAge);
}
// Shared body for the WithCache / NoCache variants above. Simulates a provider that
// becomes locally registered *between* triggering the GCD lookup and processing its
// result: the first local-store lookup misses, the second hits. The local entry must
// then win over the returned global duplicate.
private void lookupByParticipantIdGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry(long cacheMaxAge) throws Exception {
// test assumes that the local entry is registered after the global lookup has been triggered
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
doAnswer(createLookupAnswer(globalDiscoveryEntry)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
anyString(),
anyLong(),
any());
// stateful stub: first lookup misses, every subsequent one returns the entry
doAnswer(new Answer<Optional<DiscoveryEntry>>() {
// simulate provider registration after remote lookup has been triggered
boolean firstCall = true;
@Override
public Optional<DiscoveryEntry> answer(InvocationOnMock invocation) throws Throwable {
if (firstCall) {
firstCall = false;
return Optional.empty();
}
return Optional.of(discoveryEntry);
}
}).when(localDiscoveryEntryStoreMock).lookup(eq(discoveryEntry.getParticipantId()), anyLong());
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(discoveryEntry.getParticipantId(),
discoveryQos,
new String[0]);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo capturedDiscoveryEntry = (DiscoveryEntryWithMetaInfo) values[0];
// the local entry wins over the global duplicate
assertTrue(capturedDiscoveryEntry.getIsLocal());
verify(globalCapabilitiesDirectoryClient).lookup(any(),
eq(discoveryEntry.getParticipantId()),
eq(discoveryTimeout),
any());
// local store consulted twice: before and after the GCD round trip
verify(localDiscoveryEntryStoreMock, times(2)).lookup(eq(discoveryEntry.getParticipantId()), anyLong());
// duplicate is not cached or routed; only the reference count is incremented
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(routingTable, times(1)).incrementReferenceCount(eq(discoveryEntry.getParticipantId()));
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_localThenGlobal() throws InterruptedException {
// LOCAL_THEN_GLOBAL over three domains, each resolved from a different source:
// domain1 -> local provider, domain2 -> global cache, domain3 -> remote GCD lookup.
String[] domains = new String[]{ "domain1", "domain2", "domain3" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
// local provider for domain1
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setParticipantId("participantIdLocal");
localEntry.setDomain(domains[0]);
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Arrays.asList(localEntry));
// cached global entry for domain2
GlobalDiscoveryEntry globalEntry = new GlobalDiscoveryEntry();
globalEntry.setParticipantId("participantIdCached");
globalEntry.setInterfaceName(interfaceName);
globalEntry.setDomain(domains[1]);
globalEntry.setAddress(globalAddress1Serialized);
doReturn(Arrays.asList(globalEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAge()));
doReturn(Optional.of(globalEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(globalEntry.getParticipantId()), eq(Long.MAX_VALUE));
// remote entry for domain3, returned by the GCD
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
domains[2],
interfaceName,
"participantIdRemote",
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
globalAddress1Serialized);
doAnswer(createLookupAnswer(Arrays.asList(remoteGlobalEntry))).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
Mockito.<String[]> any(),
anyString(),
anyLong(),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
Object[] values = checkPromiseSuccess(promise, "lookup failed");
// all three entries (local, cached, remote) must be present in the result
Collection<DiscoveryEntry> captured = CapabilityUtils.convertToDiscoveryEntrySet(Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]));
assertNotNull(captured);
assertEquals(3, captured.size());
assertTrue(captured.contains(localEntry));
assertTrue(captured.contains(new DiscoveryEntry(globalEntry)));
assertTrue(captured.contains(new DiscoveryEntry(remoteGlobalEntry)));
// local entry: reference count only; cached/remote entries: routing table put only
verify(routingTable, times(1)).incrementReferenceCount(eq(localEntry.getParticipantId()));
verify(routingTable, never()).incrementReferenceCount(eq(globalEntry.getParticipantId()));
verify(routingTable, never()).incrementReferenceCount(eq(remoteGlobalEntry.getParticipantId()));
verify(routingTable, never()).put(eq(localEntry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
verify(routingTable, times(1)).put(eq(globalEntry.getParticipantId()), any(Address.class), eq(true), anyLong());
verify(routingTable, times(1)).put(eq(remoteGlobalEntry.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupMultipleDomains_localThenGlobal_oneLocalAllCachedDomains_returnsLocalAndCachedEntries() throws InterruptedException {
// LOCAL_THEN_GLOBAL over two domains: domain1 has a local provider, and both
// domains are covered by cached global entries — so no GCD call is needed.
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
// local provider for domain1
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setParticipantId("participantIdLocal");
localEntry.setDomain(domains[0]);
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Arrays.asList(localEntry));
// cached global entries, one per domain
GlobalDiscoveryEntry globalCachedEntry1 = new GlobalDiscoveryEntry();
globalCachedEntry1.setParticipantId("participantIdCached1");
globalCachedEntry1.setInterfaceName(interfaceName);
globalCachedEntry1.setDomain(domains[0]);
globalCachedEntry1.setAddress(globalAddress1Serialized);
GlobalDiscoveryEntry globalCachedEntry2 = new GlobalDiscoveryEntry();
globalCachedEntry2.setParticipantId("participantIdCached2");
globalCachedEntry2.setInterfaceName(interfaceName);
globalCachedEntry2.setDomain(domains[1]);
globalCachedEntry2.setAddress(globalAddress1Serialized);
Set<GlobalDiscoveryEntry> globalCachedEntries = new HashSet<GlobalDiscoveryEntry>(Arrays.asList(globalCachedEntry1,
globalCachedEntry2));
doReturn(globalCachedEntries).when(globalDiscoveryEntryCacheMock)
.lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAge()));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
Object[] values = checkPromiseSuccess(promise, "lookup failed");
verify(localDiscoveryEntryStoreMock).lookup(eq(domains), eq(interfaceName));
verify(globalDiscoveryEntryCacheMock).lookup(eq(domains), eq(interfaceName), eq(ONE_DAY_IN_MS));
// cache fully covers both domains -> the GCD must never be contacted
verify(globalCapabilitiesDirectoryClient, never()).lookup(any(), anyString(), anyLong(), any());
Collection<DiscoveryEntry> captured = CapabilityUtils.convertToDiscoveryEntrySet(Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]));
assertEquals(3, captured.size());
assertTrue(captured.contains(localEntry));
assertTrue(captured.contains(new DiscoveryEntry(globalCachedEntry1)));
assertTrue(captured.contains(new DiscoveryEntry(globalCachedEntry2)));
// local entry: reference count only; cached entries: routing table put only
verify(routingTable, times(1)).incrementReferenceCount(eq(localEntry.getParticipantId()));
verify(routingTable, never()).incrementReferenceCount(eq(globalCachedEntry1.getParticipantId()));
verify(routingTable, never()).incrementReferenceCount(eq(globalCachedEntry2.getParticipantId()));
verify(routingTable, never()).put(eq(localEntry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
verify(routingTable, times(1)).put(eq(globalCachedEntry1.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
verify(routingTable, times(1)).put(eq(globalCachedEntry2.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupMultipleDomains_localThenGlobal_allDomainsLocal_returnsOnlyLocalEntries() throws InterruptedException {
    // Every requested domain is covered by a local provider, so the cached global
    // entry must be ignored and no remote lookup performed.
    final String[] lookupDomains = { "domain1", "domain2" };
    final String lookupInterface = "interface1";
    DiscoveryQos qos = new DiscoveryQos();
    qos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    qos.setCacheMaxAge(ONE_DAY_IN_MS);

    DiscoveryEntry firstLocalEntry = new DiscoveryEntry();
    firstLocalEntry.setParticipantId("participantIdLocal1");
    firstLocalEntry.setDomain(lookupDomains[0]);
    DiscoveryEntry secondLocalEntry = new DiscoveryEntry();
    secondLocalEntry.setParticipantId("participantIdLocal2");
    secondLocalEntry.setDomain(lookupDomains[1]);
    doReturn(Arrays.asList(firstLocalEntry, secondLocalEntry)).when(localDiscoveryEntryStoreMock)
                                                              .lookup(eq(lookupDomains), eq(lookupInterface));

    GlobalDiscoveryEntry staleCachedEntry = new GlobalDiscoveryEntry();
    staleCachedEntry.setParticipantId("participantIdCached1");
    staleCachedEntry.setInterfaceName(lookupInterface);
    staleCachedEntry.setDomain(lookupDomains[0]);
    staleCachedEntry.setAddress(globalAddress1Serialized);
    doReturn(Arrays.asList(staleCachedEntry)).when(globalDiscoveryEntryCacheMock)
                                             .lookup(eq(lookupDomains),
                                                     eq(lookupInterface),
                                                     eq(qos.getCacheMaxAge()));

    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(lookupDomains, lookupInterface, qos);

    Object[] values = checkPromiseSuccess(promise, "lookup failed");
    verify(localDiscoveryEntryStoreMock).lookup(eq(lookupDomains), eq(lookupInterface));
    verify(globalDiscoveryEntryCacheMock).lookup(eq(lookupDomains), eq(lookupInterface), eq(ONE_DAY_IN_MS));
    verify(globalCapabilitiesDirectoryClient, never()).lookup(any(), anyString(), anyLong(), any());
    Collection<DiscoveryEntry> returnedEntries = CapabilityUtils.convertToDiscoveryEntrySet(Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]));
    assertEquals(2, returnedEntries.size());
    assertTrue(returnedEntries.contains(firstLocalEntry));
    assertTrue(returnedEntries.contains(secondLocalEntry));
    assertFalse(returnedEntries.contains(new DiscoveryEntry(staleCachedEntry)));
    for (DiscoveryEntry returned : returnedEntries) {
        verify(routingTable, times(1)).incrementReferenceCount(eq(returned.getParticipantId()));
        verify(routingTable, never()).put(eq(returned.getParticipantId()),
                                          any(Address.class),
                                          any(Boolean.class),
                                          anyLong());
    }
    verify(routingTable, never()).put(eq(staleCachedEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue_localEntry() throws Exception {
    // A participantId resolved from the local store must be returned with isLocal=true
    // and only bump the routing table reference count.
    final String participantId = "participantId";
    final String interfaceName = "interfaceName";
    DiscoveryQos discoveryQos = new DiscoveryQos(Long.MAX_VALUE, Long.MAX_VALUE, DiscoveryScope.LOCAL_ONLY, false);

    // local DiscoveryEntry
    DiscoveryEntry storedLocalEntry = new DiscoveryEntry();
    storedLocalEntry.setDomain("localDomain");
    storedLocalEntry.setInterfaceName(interfaceName);
    storedLocalEntry.setParticipantId(participantId);
    DiscoveryEntryWithMetaInfo expectedEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                  storedLocalEntry);
    when(localDiscoveryEntryStoreMock.lookup(eq(participantId),
                                             eq(discoveryQos.getCacheMaxAge()))).thenReturn(Optional.of(storedLocalEntry));

    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId);

    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo actualEntry = (DiscoveryEntryWithMetaInfo) values[0];
    assertEquals(expectedEntry, actualEntry);
    verify(routingTable, times(1)).incrementReferenceCount(eq(actualEntry.getParticipantId()));
    verify(routingTable, never()).put(eq(actualEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue_cachedEntry() throws Exception {
    // A participantId resolved from the global cache must be returned with isLocal=false
    // and get a routing table entry (no reference count increment).
    final String participantId = discoveryEntry.getParticipantId();
    final String interfaceName = "interfaceName";

    // cached global DiscoveryEntry
    GlobalDiscoveryEntry cachedEntry = new GlobalDiscoveryEntry();
    cachedEntry.setDomain("globalDomain");
    cachedEntry.setInterfaceName(interfaceName);
    cachedEntry.setParticipantId(participantId);
    cachedEntry.setAddress(globalAddress1Serialized);
    DiscoveryEntryWithMetaInfo expectedEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                  cachedEntry);
    when(globalDiscoveryEntryCacheMock.lookup(eq(participantId),
                                              eq(Long.MAX_VALUE))).thenReturn(Optional.of(cachedEntry));

    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId);

    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo actualEntry = (DiscoveryEntryWithMetaInfo) values[0];
    assertEquals(expectedEntry, actualEntry);
    verify(routingTable, never()).incrementReferenceCount(eq(actualEntry.getParticipantId()));
    verify(routingTable, times(1)).put(eq(actualEntry.getParticipantId()),
                                       any(Address.class),
                                       eq(true),
                                       anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localOnly_noLocalEntry_doesNotInvokeGcd_returnsNoEntryForParticipant() throws Exception {
    // LOCAL_ONLY scope without any matching local entry: the lookup must fail with
    // NO_ENTRY_FOR_PARTICIPANT and never touch the GCD or the routing table.
    final long cacheMaxAge = 10000L;
    final long discoveryTimeout = 5000L;
    DiscoveryQos qos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.LOCAL_ONLY, false);
    String participantId = discoveryEntry.getParticipantId();

    Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId, qos, new String[0]);

    checkPromiseError(lookupPromise, DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verify(globalCapabilitiesDirectoryClient,
           never()).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                           anyString(),
                           anyLong(),
                           any());
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localOnly_localGlobalEntry_doesNotInvokeGcd_returnsLocalEntry() throws Exception {
    // LOCAL_ONLY + entry registered locally -> GCD not invoked, local entry returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_ONLY,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localThenGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // LOCAL_THEN_GLOBAL without a local entry -> GCD invoked, remote result returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                            /* localEntryAvailable */ false,
                                            /* invokesGcd */ true,
                                            /* returnsLocalEntry */ false);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localThenGlobal_localGlobalEntry_doesNotInvokeGcd_returnsLocalEntry() throws Exception {
    // LOCAL_THEN_GLOBAL + entry registered locally -> GCD not invoked, local entry returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localAndGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // LOCAL_AND_GLOBAL without a local entry -> GCD invoked, remote result returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_AND_GLOBAL,
                                            /* localEntryAvailable */ false,
                                            /* invokesGcd */ true,
                                            /* returnsLocalEntry */ false);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localAndGlobal_localGlobalEntry_doesNotInvokeGcd_returnsLocalEntry() throws Exception {
    // LOCAL_AND_GLOBAL + entry registered locally -> GCD not invoked, local entry returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_AND_GLOBAL,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_globalOnly_localGlobalEntry_doesNotInvokeGcd_returnsLocalResult() throws Exception {
    // GLOBAL_ONLY + globally registered local entry -> GCD not invoked, local result returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.GLOBAL_ONLY,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_globalOnly_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // GLOBAL_ONLY without a local entry: the GCD is queried and its result is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.GLOBAL_ONLY,
                                            false /* localEntryAvailable */,
                                            true /* invokesGcd */,
                                            false /* returnsLocalEntry */);
}
/**
 * Parameterized helper for the lookupByParticipantIdWithGbids_* discovery scope tests.
 * Fix: the second parameter was misspelled ("localEntryAvalaible"); renamed to
 * localEntryAvailable (private helper, all callers pass positionally).
 *
 * @param discoveryScope      the DiscoveryScope to put into the DiscoveryQos under test
 * @param localEntryAvailable if true, discoveryEntry is registered locally (in all known GBIDs)
 *                            and the local store mock is stubbed to return it
 * @param invokesGcd          expectation: whether the global capabilities directory client
 *                            must be invoked by the lookup
 * @param returnsLocalEntry   expectation: whether the local entry (true) or the global entry
 *                            (false) must be returned
 */
private void lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope discoveryScope,
                                                     boolean localEntryAvailable,
                                                     boolean invokesGcd,
                                                     boolean returnsLocalEntry) throws Exception {
    String participantId = discoveryEntry.getParticipantId();
    final long cacheMaxAge = 10000L;
    final long discoveryTimeout = 5000L;
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, discoveryScope, false);
    if (localEntryAvailable) {
        // register in all gbids
        Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
        checkPromiseSuccess(promiseAdd, "add failed");
        doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(eq(participantId),
                                                                                       eq(Long.MAX_VALUE));
    }
    // The GCD client mock always answers with the global entry; whether it actually gets
    // called is asserted below via invokesGcd.
    doAnswer(createLookupAnswer(globalDiscoveryEntry)).when(globalCapabilitiesDirectoryClient)
                                                      .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                                              anyString(),
                                                              anyLong(),
                                                              any());
    Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
                                                                               discoveryQos,
                                                                               new String[0]);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedDiscoveryEntry = (DiscoveryEntryWithMetaInfo) values[0];
    if (invokesGcd) {
        // The discovery timeout from the DiscoveryQos must be forwarded to the GCD client.
        verify(globalCapabilitiesDirectoryClient).lookup(any(), eq(participantId), eq(discoveryTimeout), any());
    } else {
        verify(globalCapabilitiesDirectoryClient, never()).lookup(any(), anyString(), anyLong(), any());
    }
    if (returnsLocalEntry) {
        // Local result: reference count is incremented, no new routing entry is created.
        DiscoveryEntryWithMetaInfo expectedLocalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                                     discoveryEntry);
        assertEquals(expectedLocalDiscoveryEntry, capturedDiscoveryEntry);
        verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalDiscoveryEntry.getParticipantId()));
        verify(routingTable, never()).put(eq(expectedLocalDiscoveryEntry.getParticipantId()),
                                          any(Address.class),
                                          any(Boolean.class),
                                          anyLong());
    } else {
        // Global result: a routing entry is created (globally visible), no reference count change.
        DiscoveryEntryWithMetaInfo expectedGlobalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                                      globalDiscoveryEntry);
        assertEquals(expectedGlobalDiscoveryEntry, capturedDiscoveryEntry);
        verify(routingTable, never()).incrementReferenceCount(any());
        verify(routingTable, times(1)).put(eq(expectedGlobalDiscoveryEntry.getParticipantId()),
                                           any(Address.class),
                                           eq(true),
                                           anyLong());
    }
}
// GLOBAL_ONLY lookup for a provider registered with ProviderScope.LOCAL only:
// the GCD must not be contacted and the lookup must fail with NO_ENTRY_FOR_PARTICIPANT.
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_globalOnly_localOnlyEntry_doesNotInvokeGcd_noEntryForParticipant() throws Exception {
DiscoveryScope discoveryScope = DiscoveryScope.GLOBAL_ONLY;
String participantId = discoveryEntry.getParticipantId();
final long cacheMaxAge = 10000L;
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, discoveryScope, false);
// register local only
discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
checkPromiseSuccess(promiseAdd, "add failed");
// reset the mocks so that interactions caused by add() above do not leak into the
// verifications below; only the local store stub is re-installed afterwards
reset(localDiscoveryEntryStoreMock, globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(eq(participantId),
eq(Long.MAX_VALUE));
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
discoveryQos,
new String[0]);
checkPromiseError(lookupPromise, DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
verify(localDiscoveryEntryStoreMock).lookup(eq(participantId), eq(Long.MAX_VALUE));
// neither the global cache nor the GCD client may be consulted for a LOCAL-scope entry
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
// LOCAL_ONLY domain/interface lookup with an empty local store: must resolve successfully
// with an empty result array, without contacting the GCD or touching the routing table.
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localOnly_noLocalEntry_doesNotInvokeGcd_returnsEmptyArray() throws Exception {
String[] domains = { discoveryEntry.getDomain() };
final long cacheMaxAge = 10000L;
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.LOCAL_ONLY, false);
// local store returns no entries for the queried domains/interface
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(INTERFACE_NAME))).thenReturn(new ArrayList<>());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
INTERFACE_NAME,
discoveryQos,
new String[0]);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
assertEquals(0, capturedDiscoveryEntries.length);
verify(globalCapabilitiesDirectoryClient,
never()).lookup(any(), any(String[].class), anyString(), anyLong(), any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localOnly_localEntries_doesNotInvokeGcd_returnsLocalEntries() throws Exception {
    // LOCAL_ONLY with entries in the local store: they are returned without contacting the GCD.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_ONLY,
                                              true /* localEntriesAvailable */,
                                              false /* invokesGcd */,
                                              true /* returnsLocalEntry */);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localThenGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // LOCAL_THEN_GLOBAL with an empty local store: the GCD is queried and its result returned.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                              false /* localEntriesAvailable */,
                                              true /* invokesGcd */,
                                              false /* returnsLocalEntry */);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localThenGlobal_localEntries_doesNotInvokeGcd_returnsLocalEntries() throws Exception {
    // LOCAL_THEN_GLOBAL with local entries available: local results win, GCD is not contacted.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                              true /* localEntriesAvailable */,
                                              false /* invokesGcd */,
                                              true /* returnsLocalEntry */);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localAndGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // LOCAL_AND_GLOBAL with an empty local store: the GCD is queried and its result returned.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_AND_GLOBAL,
                                              false /* localEntriesAvailable */,
                                              true /* invokesGcd */,
                                              false /* returnsLocalEntry */);
}
/**
 * Parameterized helper for the lookupByDomainInterfaceWithGbids_* discovery scope tests.
 *
 * @param discoveryScope        the DiscoveryScope to put into the DiscoveryQos under test
 * @param localEntriesAvailable if true, the local store mock is stubbed to return discoveryEntry
 * @param invokesGcd            expectation: whether the GCD client must be invoked
 * @param returnsLocalEntry     expectation: whether the local (true) or global (false) entry
 *                              must be the first result
 */
private void lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope discoveryScope,
boolean localEntriesAvailable,
boolean invokesGcd,
boolean returnsLocalEntry) throws Exception {
String[] domains = { discoveryEntry.getDomain() };
List<DiscoveryEntry> discoveryEntries = Arrays.asList(discoveryEntry);
List<GlobalDiscoveryEntry> globalDiscoveryEntries = Arrays.asList(globalDiscoveryEntry);
final long cacheMaxAge = 10000L;
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, discoveryScope, false);
if (localEntriesAvailable) {
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(INTERFACE_NAME))).thenReturn(discoveryEntries);
}
// GCD client mock always answers with the global entries; whether it is actually
// invoked is asserted below via invokesGcd
doAnswer(createLookupAnswer(globalDiscoveryEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
Mockito.<String[]> any(),
anyString(),
anyLong(),
any());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
INTERFACE_NAME,
discoveryQos,
new String[0]);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
if (invokesGcd) {
// the discovery timeout from the DiscoveryQos must be forwarded to the GCD client
verify(globalCapabilitiesDirectoryClient).lookup(any(),
eq(domains),
eq(INTERFACE_NAME),
eq(discoveryTimeout),
any());
} else {
verify(globalCapabilitiesDirectoryClient,
never()).lookup(any(), any(String[].class), anyString(), anyLong(), any());
}
if (returnsLocalEntry) {
// local result: reference count incremented, no new routing entry
DiscoveryEntryWithMetaInfo expectedLocalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
discoveryEntry);
assertEquals(expectedLocalDiscoveryEntry, capturedDiscoveryEntries[0]);
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalDiscoveryEntry.getParticipantId()));
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
} else {
// global result: routing entry created, no reference count change
DiscoveryEntryWithMetaInfo expectedGlobalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
globalDiscoveryEntry);
assertEquals(expectedGlobalDiscoveryEntry, capturedDiscoveryEntries[0]);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, times(1)).put(eq(expectedGlobalDiscoveryEntry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
}
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_respectsCacheMaxAge() throws Exception {
    // The cacheMaxAge from the DiscoveryQos must be forwarded to the global cache lookup.
    final long maxCacheAgeMs = 10000L;
    final long timeoutMs = 5000L;
    String participantId = discoveryEntry.getParticipantId();
    DiscoveryQos qos = new DiscoveryQos(maxCacheAgeMs,
                                        timeoutMs,
                                        DiscoveryScope.LOCAL_AND_GLOBAL,
                                        false);
    localCapabilitiesDirectory.lookup(participantId, qos, new String[0]);
    verify(globalDiscoveryEntryCacheMock).lookup(eq(participantId), eq(maxCacheAgeMs));
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_respectsCacheMaxAge() throws Exception {
    // The cacheMaxAge from the DiscoveryQos must be forwarded to the global cache lookup.
    final long maxCacheAgeMs = 10000L;
    final long timeoutMs = 5000L;
    String[] domains = { discoveryEntry.getDomain() };
    DiscoveryQos qos = new DiscoveryQos(maxCacheAgeMs,
                                        timeoutMs,
                                        DiscoveryScope.LOCAL_AND_GLOBAL,
                                        false);
    localCapabilitiesDirectory.lookup(domains, INTERFACE_NAME, qos, new String[0]);
    verify(globalDiscoveryEntryCacheMock).lookup(eq(domains), eq(INTERFACE_NAME), eq(maxCacheAgeMs));
}
// Lookup by participantId (no QoS overload) for an entry only known remotely:
// the returned DiscoveryEntryWithMetaInfo must carry isLocal=false and a routing
// entry must be created for it.
// Fix: removed a dead local — the original built a DiscoveryQos(LOCAL_THEN_GLOBAL)
// that was never passed to lookup(participantId).
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue_globalEntry() throws Exception {
    String participantId = "participantId";
    String interfaceName = "interfaceName";
    // remote global DiscoveryEntry
    String remoteGlobalDomain = "remoteglobaldomain";
    final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
                                                                            remoteGlobalDomain,
                                                                            interfaceName,
                                                                            participantId,
                                                                            new ProviderQos(),
                                                                            System.currentTimeMillis(),
                                                                            System.currentTimeMillis() + 10000L,
                                                                            "publicKeyId",
                                                                            globalAddress1Serialized);
    DiscoveryEntryWithMetaInfo remoteGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                                   remoteGlobalEntry);
    // Let the GCD client mock answer the participantId lookup with the remote entry.
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            @SuppressWarnings("unchecked")
            Callback<GlobalDiscoveryEntry> callback = (Callback<GlobalDiscoveryEntry>) invocation.getArguments()[0];
            callback.onSuccess(remoteGlobalEntry);
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient)
      .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
              eq(participantId),
              anyLong(),
              eq(knownGbids));
    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedRemoteGlobalEntry = (DiscoveryEntryWithMetaInfo) values[0];
    assertEquals(remoteGlobalEntryWithMetaInfo, capturedRemoteGlobalEntry);
    // Remote result: no reference count increment, but a routing entry must be created.
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, times(1)).put(eq(remoteGlobalEntryWithMetaInfo.getParticipantId()),
                                       any(Address.class),
                                       any(Boolean.class),
                                       anyLong());
}
// Verifies that the discoveryTimeout configured in the DiscoveryQos is the ttl value
// passed on to the GCD client for a lookup by participantId.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryQosTtlIsUsed() throws Exception {
String participantId = "participantId";
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
long discoveryTimeout = 1000000000;
discoveryQos.setDiscoveryTimeout(discoveryTimeout);
// remote global DiscoveryEntry
String remoteGlobalDomain = "remoteglobaldomain";
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
remoteGlobalDomain,
interfaceName,
participantId,
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
globalAddress1Serialized);
// let the GCD client mock answer the lookup with the remote entry
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
@SuppressWarnings("unchecked")
Callback<GlobalDiscoveryEntry> callback = (Callback<GlobalDiscoveryEntry>) invocation.getArguments()[0];
callback.onSuccess(remoteGlobalEntry);
return null;
}
}).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
eq(knownGbids));
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
discoveryQos,
new String[0]);
checkPromiseSuccess(lookupPromise, "lookup failed");
// the configured discoveryTimeout must have been forwarded as ttl to the GCD client
verify(globalCapabilitiesDirectoryClient).lookup(any(), eq(participantId), eq(discoveryTimeout), any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, times(1)).put(eq(participantId), any(Address.class), any(Boolean.class), anyLong());
}
// Domain/interface lookup that mixes three sources: a local entry, a cached global entry
// and a remote global entry. All three must be returned with the correct isLocal flag;
// only the local entry increments the routing table reference count, while each global
// entry gets a routing entry with isGloballyVisible=true.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterface_DiscoveryEntriesWithMetaInfoContainExpectedIsLocalValue_localCachedAndGlobalEntries() throws InterruptedException {
String globalDomain = "globaldomain";
String remoteGlobalDomain = "remoteglobaldomain";
String[] domains = new String[]{ "localdomain", globalDomain, remoteGlobalDomain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
// local DiscoveryEntry
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setParticipantId("participantIdLocal");
localEntry.setDomain(domains[0]);
DiscoveryEntryWithMetaInfo localEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
localEntry);
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Arrays.asList(localEntry));
// cached global DiscoveryEntry
GlobalDiscoveryEntry cachedGlobalEntry = new GlobalDiscoveryEntry();
cachedGlobalEntry.setParticipantId("participantIdCached");
cachedGlobalEntry.setInterfaceName(interfaceName);
cachedGlobalEntry.setDomain(globalDomain);
cachedGlobalEntry.setAddress(globalAddress1Serialized);
DiscoveryEntryWithMetaInfo cachedGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
cachedGlobalEntry);
doReturn(Arrays.asList(cachedGlobalEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(domains),
eq(interfaceName),
eq(discoveryQos.getCacheMaxAge()));
doReturn(Optional.of(cachedGlobalEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(cachedGlobalEntry.getParticipantId()), eq(Long.MAX_VALUE));
// remote global DiscoveryEntry
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
remoteGlobalDomain,
interfaceName,
"participantIdRemote",
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
globalAddress1Serialized);
DiscoveryEntryWithMetaInfo remoteGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
remoteGlobalEntry);
doAnswer(createLookupAnswer(Arrays.asList(remoteGlobalEntry))).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
Object[] values = checkPromiseSuccess(promise, "lookup failed");
List<DiscoveryEntryWithMetaInfo> capabilities = Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]);
assertEquals(3, capabilities.size());
// local entry: isLocal=true, reference count incremented, no routing entry
assertTrue(capabilities.contains(localEntryWithMetaInfo));
verify(routingTable, times(1)).incrementReferenceCount(eq(localEntryWithMetaInfo.getParticipantId()));
verify(routingTable, never()).put(eq(localEntryWithMetaInfo.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
// cached global entry: isLocal=false, routing entry created
assertTrue(capabilities.contains(cachedGlobalEntryWithMetaInfo));
verify(routingTable, never()).incrementReferenceCount(eq(cachedGlobalEntryWithMetaInfo.getParticipantId()));
verify(routingTable, times(1)).put(eq(cachedGlobalEntryWithMetaInfo.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
// remote global entry: isLocal=false, routing entry created
assertTrue(capabilities.contains(remoteGlobalEntryWithMetaInfo));
verify(routingTable, never()).incrementReferenceCount(eq(remoteGlobalEntryWithMetaInfo.getParticipantId()));
verify(routingTable, times(1)).put(eq(remoteGlobalEntryWithMetaInfo.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
}
// When the GCD client fails with a JoynrRuntimeException, the domain/interface lookup
// promise must be rejected with a ProviderRuntimeException wrapping the exception text,
// and the routing table must remain untouched.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_exception() throws InterruptedException {
String domain = "domain";
String[] domains = new String[]{ domain };
String interfaceName = "interface";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
JoynrRuntimeException exception = new JoynrRuntimeException("lookup failed");
ProviderRuntimeException expectedException = new ProviderRuntimeException(exception.toString());
doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
interfaceName,
discoveryQos,
knownGbids);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseException(promise, expectedException);
}
/**
 * Helper: stubs the GCD client to answer the domain/interface lookup with the given
 * DiscoveryError, performs a GLOBAL_ONLY lookup via the gbids overload, and asserts
 * the promise is rejected with that error (as an ApplicationException) while the
 * routing table stays untouched.
 */
private void testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String domain = "domain";
String[] domains = new String[]{ domain };
String interfaceName = "interface";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
interfaceName,
discoveryQos,
knownGbids);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseError(promise, expectedError);
}
/**
 * Helper: like testLookupByDomainInterfaceWithGbidsIsProperlyRejected, but uses the
 * lookup overload without gbids (Lookup1Deferred); the DiscoveryError is then expected
 * to surface wrapped in a ProviderRuntimeException.
 */
private void testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String domain = "domain";
String[] domains = new String[]{ domain };
String interfaceName = "interface";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseErrorInProviderRuntimeException(promise, expectedError);
}
// Parameterized variants: each test delegates to one of the rejection helpers above
// with a specific DiscoveryError.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_internalError() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_internalError() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
// When the GCD client fails with a JoynrRuntimeException, the participantId lookup promise
// must be rejected with a ProviderRuntimeException wrapping the exception text, and the
// routing table must remain untouched.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_exception() throws InterruptedException {
String participantId = "participantId";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
JoynrRuntimeException exception = new JoynrRuntimeException("lookup failed");
ProviderRuntimeException expectedException = new ProviderRuntimeException(exception.toString());
doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(participantId, discoveryQos, knownGbids);
checkPromiseException(promise, expectedException);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Helper: stubs the GCD client to answer the participantId lookup with the given
 * DiscoveryError, performs the lookup via the gbids overload and asserts the promise
 * is rejected with that error while the routing table stays untouched.
 */
private void testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String participantId = "participantId";
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
ArgumentMatchers.<String[]> any());
// args: cacheMaxAge=10000ms, discoveryTimeout=500ms, LOCAL_AND_GLOBAL, providerMustSupportOnChange=false
DiscoveryQos discoveryQos = new DiscoveryQos(10000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(participantId, discoveryQos, knownGbids);
checkPromiseError(promise, expectedError);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Helper: like testLookupByParticipantIdWithGbidsIsProperlyRejected, but uses the plain
 * lookup(participantId) overload (Lookup3Deferred); the DiscoveryError is then expected
 * to surface wrapped in a ProviderRuntimeException.
 */
private void testLookupByParticipantIdIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String participantId = "participantId";
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup3Deferred> promise = localCapabilitiesDirectory.lookup(participantId);
checkPromiseErrorInProviderRuntimeException(promise, expectedError);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
// Parameterized variants: each test delegates to one of the participantId rejection
// helpers above with a specific DiscoveryError.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_internalError() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_noEntryForParticipant() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_internalError() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_noEntryForParticipant() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
}
// GBID validation variants: unknown gbids, empty string, duplicates, null element and
// null array must all be rejected up front (UNKNOWN_GBID / INVALID_GBID) for both the
// domain/interface and the participantId lookup overloads.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_unknownGbids() throws InterruptedException {
String[] gbids = new String[]{ "not", "known" };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_unknownGbids() throws InterruptedException {
String[] gbids = new String[]{ "not", "known" };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_emptyGbid() throws InterruptedException {
String[] gbids = new String[]{ "" };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_emptyGbid() throws InterruptedException {
String[] gbids = new String[]{ "" };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_duplicateGbid() throws InterruptedException {
String[] gbids = new String[]{ knownGbids[1], knownGbids[0], knownGbids[1] };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_duplicateGbid() throws InterruptedException {
String[] gbids = new String[]{ knownGbids[1], knownGbids[0], knownGbids[1] };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_nullGbid() throws InterruptedException {
String[] gbids = new String[]{ null };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_nullGbid() throws InterruptedException {
String[] gbids = new String[]{ null };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_nullGbidArray() throws InterruptedException {
String[] gbids = null;
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_nullGbidArray() throws InterruptedException {
String[] gbids = null;
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
/**
 * Helper: performs a domain/interface lookup with the given (invalid/unknown) gbids and
 * asserts the promise is rejected with the expected error BEFORE the GCD client is ever
 * invoked; routing table must stay untouched.
 */
private void testLookupByDomainInterfaceWithDiscoveryError(String[] gbids,
DiscoveryError expectedError) throws InterruptedException {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
interfaceName,
discoveryQos,
gbids);
// gbid validation happens locally; the GCD client must never be reached
verify(globalCapabilitiesDirectoryClient,
never()).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
any(String[].class),
anyString(),
anyLong(),
any(String[].class));
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseError(promise, expectedError);
}
/**
 * Performs a lookup by participantId with the given (invalid) gbids and expects the
 * returned promise to be rejected with the given DiscoveryError. The invalid gbids must be
 * detected locally, i.e. neither the GCD client nor the routing table may be touched.
 */
private void testLookupByParticipantIdWithDiscoveryError(String[] gbids,
                                                         DiscoveryError expectedError) throws InterruptedException {
    final String lookupParticipantId = "participantId";
    Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(lookupParticipantId,
                                                                               new DiscoveryQos(),
                                                                               gbids);
    // no global lookup may be triggered for invalid gbids
    verify(globalCapabilitiesDirectoryClient,
           never()).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                           anyString(),
                           anyLong(),
                           any(String[].class));
    checkPromiseError(lookupPromise, expectedError);
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Waits for the given promise to be rejected and asserts that the rejection exception
 * is of the expected type and equal to the expected exception. Fails on fulfillment
 * or when no rejection arrives within DEFAULT_WAIT_TIME_MS.
 */
private static void checkPromiseException(Promise<?> promise,
                                          Exception expectedException) throws InterruptedException {
    CountDownLatch rejectionLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onFulfillment(Object... values) {
            fail("Unexpected fulfillment when expecting rejection.");
        }

        @Override
        public void onRejection(JoynrException exception) {
            assertTrue(expectedException.getClass().isInstance(exception));
            assertEquals(expectedException, exception);
            rejectionLatch.countDown();
        }
    });
    assertTrue(rejectionLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
}
/**
 * Waits for the given promise to be rejected with an {@code ApplicationException} and
 * asserts that it carries the expected DiscoveryError. Fails on fulfillment, on any other
 * rejection type, or when no rejection arrives within DEFAULT_WAIT_TIME_MS.
 *
 * @param promise promise expected to be rejected
 * @param expectedError the DiscoveryError the rejection must carry
 * @throws InterruptedException if waiting for the rejection is interrupted
 */
private static void checkPromiseError(Promise<?> promise,
                                      DiscoveryError expectedError) throws InterruptedException {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onRejection(JoynrException exception) {
            if (exception instanceof ApplicationException) {
                DiscoveryError error = ((ApplicationException) exception).getError();
                assertEquals(expectedError, error);
                countDownLatch.countDown();
            } else {
                fail("Did not receive an ApplicationException on rejection.");
            }
        }

        @Override
        public void onFulfillment(Object... values) {
            fail("Unexpected fulfillment when expecting rejection.");
        }
    });
    assertTrue(countDownLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
}
/**
 * Waits for the given promise to be rejected with a {@code ProviderRuntimeException} whose
 * message contains the name of the expected DiscoveryError. Fails on fulfillment, on any
 * other rejection type, or when no rejection arrives within DEFAULT_WAIT_TIME_MS.
 *
 * @param promise promise expected to be rejected
 * @param expectedError the DiscoveryError whose name must appear in the exception message
 * @throws InterruptedException if waiting for the rejection is interrupted
 */
private static void checkPromiseErrorInProviderRuntimeException(Promise<?> promise,
                                                                DiscoveryError expectedError) throws InterruptedException {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onRejection(JoynrException exception) {
            if (exception instanceof ProviderRuntimeException) {
                assertTrue(((ProviderRuntimeException) exception).getMessage().contains(expectedError.name()));
                countDownLatch.countDown();
            } else {
                fail("Did not receive a ProviderRuntimeException on rejection.");
            }
        }

        @Override
        public void onFulfillment(Object... values) {
            fail("Unexpected fulfillment when expecting rejection.");
        }
    });
    assertTrue(countDownLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
}
/**
 * Waits for the given promise to be fulfilled and returns its fulfillment values.
 * Fails the test with the given message if the promise is rejected or does not
 * complete within DEFAULT_WAIT_TIME_MS.
 *
 * @param promise promise expected to be fulfilled
 * @param onRejectionMessage failure message used on rejection or timeout
 * @return the values the promise was fulfilled with
 * @throws InterruptedException if waiting for the fulfillment is interrupted
 */
private static Object[] checkPromiseSuccess(Promise<? extends AbstractDeferred> promise,
                                            String onRejectionMessage) throws InterruptedException {
    ArrayList<Object> result = new ArrayList<>();
    CountDownLatch countDownLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onRejection(JoynrException error) {
            fail(onRejectionMessage + ": " + error);
        }

        @Override
        public void onFulfillment(Object... values) {
            result.addAll(Arrays.asList(values));
            countDownLatch.countDown();
        }
    });
    assertTrue(onRejectionMessage + ": promise timeout",
               countDownLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // toArray() without arguments already returns Object[]; no sized array needed
    return result.toArray();
}
// remove() of a globally registered provider: GCD.remove must be invoked, and the entry
// may only be removed from the local store after the (delayed) GCD call has succeeded.
@Test(timeout = TEST_TIMEOUT)
public void remove_globallyRegistered_GcdCalled() throws InterruptedException {
when(globalAddressProvider.get()).thenReturn(new MqttAddress("testgbid", "testtopic"));
Boolean awaitGlobalRegistration = true;
Promise<DeferredVoid> addPromise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
checkPromiseSuccess(addPromise, "add failed");
// cdlStart fires when GCD.remove is entered; cdlDone fires after its delayed success (1500 ms)
CountDownLatch cdlStart = new CountDownLatch(1);
CountDownLatch cdlDone = new CountDownLatch(1);
doAnswer(createAnswerWithDelayedSuccess(cdlStart,
cdlDone,
1500)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(globalDiscoveryEntry.getParticipantId()),
any(String[].class));
when(localDiscoveryEntryStoreMock.lookup(discoveryEntry.getParticipantId(),
Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry));
localCapabilitiesDirectory.remove(globalDiscoveryEntry.getParticipantId());
assertTrue(cdlStart.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(discoveryEntry.getParticipantId()),
any(String[].class));
// while the GCD call is still in flight, nothing may be removed locally
verify(localDiscoveryEntryStoreMock, times(0)).remove(any(String.class));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(any(String.class));
assertTrue(cdlDone.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
// after GCD success, the entry is removed from the local store but not from the global cache
verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(discoveryEntry.getParticipantId()));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
}
// remove() of a provider with LOCAL scope: no GCD interaction, only local removal.
@Test(timeout = TEST_TIMEOUT)
public void remove_localProvider_GcdNotCalled() throws InterruptedException {
discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
Boolean awaitGlobalRegistration = true;
Promise<DeferredVoid> addPromise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
checkPromiseSuccess(addPromise, "add failed");
when(localDiscoveryEntryStoreMock.lookup(discoveryEntry.getParticipantId(),
Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry));
localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
// grace period: give an (erroneously) triggered asynchronous GCD call a chance to happen
Thread.sleep(500);
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
times(0)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
anyString(),
any(String[].class));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(discoveryEntry.getParticipantId()));
}
// remove() of an unknown participant without mapped gbids: neither GCD nor the local
// store/cache may be touched.
@Test(timeout = TEST_TIMEOUT)
public void remove_participantNotRegisteredNoGbids_GcdNotCalled() throws InterruptedException {
String participantId = "unknownparticipantId";
CountDownLatch cdl = new CountDownLatch(1);
doReturn(Optional.empty()).when(localDiscoveryEntryStoreMock).lookup(eq(participantId), anyLong());
localCapabilitiesDirectory.remove(participantId);
// NOTE(review): cdl is never counted down, so this await just acts as a fixed wait
// before verifying that no removal happened.
assertFalse(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
never()).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(String.class),
any(String[].class));
verify(localDiscoveryEntryStoreMock, never()).remove(any(String.class));
verify(globalDiscoveryEntryCacheMock, never()).remove(any(String.class));
}
// remove() of a participant that is not in the local store but whose gbids are known
// (provisioned entry): GCD.remove is still called and the local store entry removed.
@Test(timeout = TEST_TIMEOUT)
public void remove_participantNotRegisteredGbidsMapped_GcdCalled() throws InterruptedException {
// this test assumes that the participant gets registered by a queued add task after enqueuing the remove task
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
doReturn(Optional.empty()).when(localDiscoveryEntryStoreMock)
.lookup(eq(provisionedGlobalDiscoveryEntry.getParticipantId()), anyLong());
localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(provisionedGlobalDiscoveryEntry.getParticipantId()));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
}
// GCD.remove failing with JoynrTimeoutException is retried (at least 2 invocations);
// until it succeeds, nothing is removed locally.
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_TimeoutException() throws InterruptedException {
// latch count 2: wait for the initial attempt plus at least one retry
CountDownLatch cdl = new CountDownLatch(2);
doAnswer(createVoidAnswerWithException(cdl,
new JoynrTimeoutException(0))).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
atLeast(2)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
// GCD.remove failing with a non-timeout exception is NOT retried (exactly one invocation)
// and nothing is removed locally.
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_NonTimeoutException() throws InterruptedException {
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createVoidAnswerWithException(cdl,
new JoynrCommunicationException())).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
// GCD.remove rejected with NO_ENTRY_FOR_PARTICIPANT: the provider is already gone
// globally, so the entry is still removed from the local store.
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_DiscoveryError_NoEntry() throws InterruptedException {
// covers NO_ENTRY_FOR_PARTICIPANT as well as NO_ENTRY_FOR_SELECTED_BACKENDS
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createVoidAnswerWithDiscoveryError(cdl,
DiscoveryError.NO_ENTRY_FOR_PARTICIPANT)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(provisionedGlobalDiscoveryEntry.getParticipantId()));
}
// GCD.remove rejected with INVALID_GBID (an unrecoverable configuration error):
// no retry and the local entry is kept.
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_DiscoveryError_InvalidGbid() throws InterruptedException {
// Also covers UNKNOWN_GBID and INTERNAL_ERROR
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createVoidAnswerWithDiscoveryError(cdl,
DiscoveryError.INVALID_GBID)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verifyNoMoreInteractions(routingTable);
verify(globalCapabilitiesDirectoryClient,
times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
any(String[].class));
verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
/**
 * Adds a global discovery entry with the given gbid selection and then verifies that the
 * subsequent remove() passes exactly the same gbids, in the same order, to GCD.remove.
 */
private void testRemoveUsesSameGbidOrderAsAdd(String[] selectedGbids) throws InterruptedException {
String[] expectedGbids = selectedGbids.clone();
// participantId encodes the gbid selection so repeated invocations do not collide
String participantId = LocalCapabilitiesDirectoryTest.class.getName() + ".removeUsesSameGbidOrderAsAdd."
+ Arrays.toString(selectedGbids);
String domain = "testDomain";
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.GLOBAL);
globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
domain,
INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
boolean awaitGlobalRegistration = true;
Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(globalDiscoveryEntry,
awaitGlobalRegistration,
selectedGbids);
checkPromiseSuccess(promise, "add failed in testRemoveUsesSameGbidOrderAsAdd");
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(String.class),
any(String[].class));
when(localDiscoveryEntryStoreMock.lookup(globalDiscoveryEntry.getParticipantId(),
Long.MAX_VALUE)).thenReturn(Optional.of(globalDiscoveryEntry));
localCapabilitiesDirectory.remove(globalDiscoveryEntry.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
// the gbids used for remove must equal the ones used for add, in identical order
verify(globalCapabilitiesDirectoryClient).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(String.class),
eq(expectedGbids));
verifyNoMoreInteractions(routingTable);
}
@Test(timeout = TEST_TIMEOUT)
public void testRemoveUsesSameGbidOrderAsAdd() throws InterruptedException {
    // Exercise both single-gbid selections and both orderings of the dual-gbid selection.
    String[][] gbidSelections = new String[][]{ { knownGbids[0] },
                                                { knownGbids[1] },
                                                { knownGbids[0], knownGbids[1] },
                                                { knownGbids[1], knownGbids[0] } };
    for (String[] selectedGbids : gbidSelections) {
        testRemoveUsesSameGbidOrderAsAdd(selectedGbids);
    }
}
// Regression test: if taskFinished() is invoked while a remove task is still being
// processed, the GcdTaskSequencer must neither crash nor invoke the task's callback,
// and must remain able to process further tasks afterwards.
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerDoesNotCrashOnExceptionAfterRemoveTaskFinished() throws InterruptedException,
IllegalAccessException {
/// We have to add before we can remove anything
String[] expectedGbids = knownGbids.clone();
final boolean awaitGlobalRegistration = true;
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
awaitGlobalRegistration);
checkPromiseSuccess(promise, "add failed");
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(),
anyLong(),
eq(expectedGbids));
reset(globalCapabilitiesDirectoryClient);
///
///The real test starts here
// cdl1: task processing entered; cdl2: released after taskFinished() has been called
CountDownLatch cdl1 = new CountDownLatch(1);
CountDownLatch cdl2 = new CountDownLatch(1);
AtomicBoolean cbCalled = new AtomicBoolean();
GcdTask.CallbackCreator callbackCreator = new GcdTask.CallbackCreator() {
@Override
public CallbackWithModeledError<Void, DiscoveryError> createCallback() {
return new CallbackWithModeledError<Void, DiscoveryError>() {
@Override
public void onFailure(DiscoveryError errorEnum) {
// taskFinished is called manually
logger.error("onFailure callback called, DiscoveryError {}", errorEnum);
cbCalled.set(true);
}
@Override
public void onFailure(JoynrRuntimeException runtimeException) {
// taskFinished is called manually
logger.error("onFailure callback called:", runtimeException);
cbCalled.set(true);
}
@Override
public void onSuccess(Void result) {
// taskFinished is called manually
logger.error("onSuccess callback called");
cbCalled.set(true);
}
};
}
};
// Remove task whose getParticipantId() blocks until the test releases it, so that
// taskFinished() can be called while the task is still in flight.
class TestGcdRemoveTask extends GcdTask {
public TestGcdRemoveTask(CallbackCreator callbackCreator, String participantId) {
super(MODE.REMOVE, callbackCreator, participantId, null, null, 0l, true);
}
@Override
public String getParticipantId() {
cdl1.countDown();
try {
// block GcdTaskSequencer until taskFinished has been called
cdl2.await();
} catch (InterruptedException e) {
// ignore
}
return super.getParticipantId();
}
}
TestGcdRemoveTask task = new TestGcdRemoveTask(callbackCreator, globalDiscoveryEntry.getParticipantId());
gcdTaskSequencer.addTask(task);
assertTrue(cdl1.await(DEFAULT_WAIT_TIME_MS * 100, TimeUnit.MILLISECONDS));
// call taskFinished while task is processed
gcdTaskSequencer.taskFinished();
cdl2.countDown();
verify(globalCapabilitiesDirectoryClient,
timeout(1000).times(1)).remove(any(), eq(globalDiscoveryEntry.getParticipantId()), eq(expectedGbids));
// check that GcdTaskSequencer is still alive
localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
verify(globalCapabilitiesDirectoryClient, timeout(1000).times(1)).add(any(),
any(),
anyLong(),
eq(expectedGbids));
verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
assertFalse(cbCalled.get());
}
// After a remove task succeeds, taskFinished() must be called exactly once; any further
// callback invocations (success or failure) must be ignored and not release the sequencer again.
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterRemoveSuccess() throws InterruptedException, IllegalAccessException {
/// We have to add before we can remove anything
String[] expectedGbids = knownGbids.clone();
final boolean awaitGlobalRegistration = true;
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
awaitGlobalRegistration);
checkPromiseSuccess(promise, "add failed");
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(),
anyLong(),
eq(expectedGbids));
reset(globalCapabilitiesDirectoryClient);
///
///The real test starts here
// inject the spied sequencer so taskFinished()/addTask() invocations can be verified
setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
cdl.countDown();
return null;
}
}).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.REMOVE.equals(arg.getMode())));
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).remove(callbackCaptor.capture(),
eq(discoveryEntry.getParticipantId()),
eq(expectedGbids));
callbackCaptor.getValue().onSuccess(null);
verify(gcdTaskSequencerSpy).taskFinished();
// subsequent callback invocations must be no-ops (callback is 'disabled' after first result)
callbackCaptor.getValue().onSuccess(null);
callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
// A remove task is retried only on JoynrTimeoutException; any other failure finishes
// the task, after which further callback invocations must be ignored.
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerRetriesRemoveOnJoynrTimeoutExceptionOnly() throws InterruptedException,
IllegalAccessException {
///We need to add before we can remove
String[] expectedGbids = knownGbids.clone();
final boolean awaitGlobalRegistration = true;
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
awaitGlobalRegistration);
checkPromiseSuccess(promise, "add failed");
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(),
anyLong(),
eq(expectedGbids));
reset(globalCapabilitiesDirectoryClient);
///
///The real test starts here
// inject the spied sequencer so retryTask()/taskFinished() invocations can be verified
setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
cdl.countDown();
return null;
}
}).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.REMOVE.equals(arg.getMode())));
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).remove(callbackCaptor.capture(),
eq(discoveryEntry.getParticipantId()),
eq(expectedGbids));
// second latch for the retry attempt triggered by the timeout below
CountDownLatch cdl2 = new CountDownLatch(1);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
cdl2.countDown();
return null;
}
}).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
// timeout triggers a retry; all further invocations of the now stale callback are ignored
callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
verify(gcdTaskSequencerSpy).retryTask();
callbackCaptor.getValue().onSuccess(null);
callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
assertTrue(cdl2.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(2)).remove(callbackCaptor.capture(),
eq(discoveryEntry.getParticipantId()),
eq(expectedGbids));
callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
verify(gcdTaskSequencerSpy).taskFinished();
// After handling a non-timeout exception, the callback is 'disabled'
callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
callbackCaptor.getValue().onSuccess(null);
callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
// A remove task rejected with a DiscoveryError finishes the task (no retry); any further
// callback invocations must be ignored and not release the sequencer again.
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterRemoveDiscoveryError() throws InterruptedException,
IllegalAccessException {
///We need to add before we can remove
String[] expectedGbids = knownGbids.clone();
final boolean awaitGlobalRegistration = true;
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
awaitGlobalRegistration);
checkPromiseSuccess(promise, "add failed");
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(),
anyLong(),
eq(expectedGbids));
reset(globalCapabilitiesDirectoryClient);
///
///The real test starts here
// inject the spied sequencer so taskFinished()/addTask() invocations can be verified
setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
cdl.countDown();
return null;
}
}).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.REMOVE.equals(arg.getMode())));
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).remove(callbackCaptor.capture(),
eq(discoveryEntry.getParticipantId()),
eq(expectedGbids));
callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
verify(gcdTaskSequencerSpy).taskFinished();
// subsequent callback invocations must be no-ops (callback is 'disabled' after first result)
callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
callbackCaptor.getValue().onSuccess(null);
callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
// The scheduled freshness-update runnable must touch the local store, the global cache
// and GCD with the same participantIds and consistent lastSeen/expiry dates.
@Test(timeout = TEST_TIMEOUT)
public void callTouchForGlobalParticipantIds() throws InterruptedException {
final String participantId1 = "participantId1";
final String participantId2 = "participantId2";
// allowed deviation between expected and captured timestamps
final long toleranceMs = freshnessUpdateIntervalMs * 2 / 3;
GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
entry1.getQos().setScope(ProviderScope.GLOBAL);
entry1.setParticipantId(participantId1);
GlobalDiscoveryEntry entry2 = new GlobalDiscoveryEntry(entry1);
entry2.setParticipantId(participantId2);
Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(entry1, true);
Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(entry2, true);
checkPromiseSuccess(promiseAdd1, "add failed");
checkPromiseSuccess(promiseAdd2, "add failed");
ArgumentCaptor<Long> lastSeenDateCaptor = ArgumentCaptor.forClass(Long.class);
ArgumentCaptor<Long> expiryDateCaptor = ArgumentCaptor.forClass(Long.class);
String[] touchedParticipantIds = new String[]{ participantId1, participantId2 };
String[] expectedParticipantIds = touchedParticipantIds.clone();
when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
anyLong())).thenReturn(touchedParticipantIds);
verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
eq(freshnessUpdateIntervalMs),
eq(freshnessUpdateIntervalMs),
eq(TimeUnit.MILLISECONDS));
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.touch(ArgumentMatchers.<Callback<Void>> any(),
eq(expectedParticipantIds),
anyString());
Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
final long expectedLastSeenDateMs = System.currentTimeMillis();
final long expectedExpiryDateMs = expectedLastSeenDateMs + DEFAULT_EXPIRY_TIME_MS;
Runnable runnable = runnableCaptor.getValue();
runnable.run();
verify(localDiscoveryEntryStoreMock, times(1)).touchDiscoveryEntries(lastSeenDateCaptor.capture(),
expiryDateCaptor.capture());
assertTrue(Math.abs(lastSeenDateCaptor.getValue() - expectedLastSeenDateMs) <= toleranceMs);
assertTrue(Math.abs(expiryDateCaptor.getValue() - expectedExpiryDateMs) <= toleranceMs);
// the global cache must be touched with exactly the dates passed to the local store
verify(globalDiscoveryEntryCacheMock, times(1)).touchDiscoveryEntries(eq(expectedParticipantIds),
eq(lastSeenDateCaptor.getValue()),
eq(expiryDateCaptor.getValue()));
assertTrue(Math.abs(lastSeenDateCaptor.getValue() - expectedLastSeenDateMs) <= toleranceMs);
assertTrue(Math.abs(expiryDateCaptor.getValue() - expectedExpiryDateMs) <= toleranceMs);
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
eq(expectedParticipantIds),
anyString());
}
// When the local store reports no stale entries, GCD.touch must not be called at all.
@Test
public void touchNotCalled_noParticipantIdsToTouch() throws InterruptedException {
    final String[] noParticipantIds = new String[0];
    when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
                                                            anyLong())).thenReturn(noParticipantIds);
    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(TimeUnit.MILLISECONDS));
    // run the scheduled freshness-update task once
    runnableCaptor.getValue().run();
    verify(globalCapabilitiesDirectoryClient, times(0)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              any(),
                                                              anyString());
}
// Two providers registered for the same single gbid: the freshness-update runnable must
// issue exactly one GCD.touch carrying both participantIds for that gbid.
@Test
public void touchCalledOnce_multipleParticipantIdsForSingleGbid() throws InterruptedException {
String participantId1 = "participantId1";
String participantId2 = "participantId2";
String gbid = knownGbids[1];
String[] gbids = { gbid };
GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
entry1.getQos().setScope(ProviderScope.GLOBAL);
entry1.setParticipantId(participantId1);
// stale dates ensure the entries qualify for a freshness update
entry1.setExpiryDateMs(0l);
entry1.setLastSeenDateMs(0l);
GlobalDiscoveryEntry entry2 = new GlobalDiscoveryEntry(entry1);
entry2.setParticipantId(participantId2);
Promise<Add1Deferred> promiseAdd1 = localCapabilitiesDirectory.add(entry1, true, gbids);
Promise<Add1Deferred> promiseAdd2 = localCapabilitiesDirectory.add(entry2, true, gbids);
checkPromiseSuccess(promiseAdd1, "add failed");
checkPromiseSuccess(promiseAdd2, "add failed");
// Mock return values of localDiscoveryEntryStore.touchDiscoveryEntries
String[] participantIdsToTouch = new String[]{ participantId1, participantId2 };
when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
anyLong())).thenReturn(participantIdsToTouch);
verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
eq(freshnessUpdateIntervalMs),
eq(freshnessUpdateIntervalMs),
eq(TimeUnit.MILLISECONDS));
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.touch(ArgumentMatchers.<Callback<Void>> any(),
eq(participantIdsToTouch),
anyString());
Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
Runnable runnable = runnableCaptor.getValue();
runnable.run();
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
eq(participantIdsToTouch),
eq(gbid));
}
// One provider registered for two gbids: the freshness-update runnable must issue only a
// single GCD.touch, addressed to the first of the provider's gbids.
@Test
public void touchCalledOnce_singleParticipantIdForMultipleGbids() throws InterruptedException {
String participantId1 = "participantId1";
String gbid1 = knownGbids[1];
String gbid2 = knownGbids[2];
String[] gbids = { gbid1, gbid2 };
GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
entry1.getQos().setScope(ProviderScope.GLOBAL);
entry1.setParticipantId(participantId1);
// stale dates ensure the entry qualifies for a freshness update
entry1.setExpiryDateMs(0l);
entry1.setLastSeenDateMs(0l);
Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(entry1, true, gbids);
checkPromiseSuccess(promiseAdd, "add failed");
// Mock return values of localDiscoveryEntryStore.touchDiscoveryEntries
String[] participantIdsToTouch = new String[]{ participantId1 };
when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
anyLong())).thenReturn(participantIdsToTouch);
verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
eq(freshnessUpdateIntervalMs),
eq(freshnessUpdateIntervalMs),
eq(TimeUnit.MILLISECONDS));
CountDownLatch cdl = new CountDownLatch(1);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.touch(ArgumentMatchers.<Callback<Void>> any(),
eq(participantIdsToTouch),
anyString());
Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
Runnable runnable = runnableCaptor.getValue();
runnable.run();
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
eq(participantIdsToTouch),
eq(gbid1));
}
// Two providers registered for different gbids: the freshness-update runnable must issue
// one GCD.touch per gbid, each carrying only the participantId registered for that gbid.
@Test
public void touchCalledTwice_twoParticipantIdsForDifferentGbids() throws InterruptedException {
String participantId1 = "participantId1";
String participantId2 = "participantId2";
String gbid1 = knownGbids[1];
String gbid2 = knownGbids[2];
String[] gbids1 = { gbid1 };
String[] gbids2 = { gbid2 };
GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
entry1.getQos().setScope(ProviderScope.GLOBAL);
entry1.setParticipantId(participantId1);
// stale dates ensure the entries qualify for a freshness update
entry1.setExpiryDateMs(0l);
entry1.setLastSeenDateMs(0l);
GlobalDiscoveryEntry entry2 = new GlobalDiscoveryEntry(entry1);
entry2.setParticipantId(participantId2);
Promise<Add1Deferred> promiseAdd1 = localCapabilitiesDirectory.add(entry1, true, gbids1);
Promise<Add1Deferred> promiseAdd2 = localCapabilitiesDirectory.add(entry2, true, gbids2);
checkPromiseSuccess(promiseAdd1, "add failed");
checkPromiseSuccess(promiseAdd2, "add failed");
// Mock return values of localDiscoveryEntryStore.touchDiscoveryEntries
String[] participantIdsToTouch = new String[]{ participantId1, participantId2 };
when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
anyLong())).thenReturn(participantIdsToTouch);
verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
eq(freshnessUpdateIntervalMs),
eq(freshnessUpdateIntervalMs),
eq(TimeUnit.MILLISECONDS));
String[] expectedParticipantIds1 = new String[]{ participantId1 };
String[] expectedParticipantIds2 = new String[]{ participantId2 };
// latch count 2: one touch call expected per gbid
CountDownLatch cdl = new CountDownLatch(2);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.touch(ArgumentMatchers.<Callback<Void>> any(),
ArgumentMatchers.<String[]> any(),
anyString());
Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
Runnable runnable = runnableCaptor.getValue();
runnable.run();
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
eq(expectedParticipantIds1),
eq(gbid1));
verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
eq(expectedParticipantIds2),
eq(gbid2));
}
/**
 * removeStale() of the GCD client must be invoked once per known backend, the
 * captured maxLastSeenDateMs must lie within a small tolerance of "now", and the
 * GBIDs must be passed in the order of {@code knownGbids}.
 */
@Test
public void removeStaleProvidersOfClusterController_invokesGcdClient() {
    final long toleranceMs = 200L;
    final ArgumentCaptor<Long> lastSeenCaptor = ArgumentCaptor.forClass(Long.class);
    final ArgumentCaptor<String> gbidCaptor = ArgumentCaptor.forClass(String.class);

    // Read the reference time before triggering the call so the captured
    // maxLastSeenDateMs cannot legitimately exceed it.
    final long beforeCallMs = System.currentTimeMillis();
    localCapabilitiesDirectory.removeStaleProvidersOfClusterController();

    verify(globalCapabilitiesDirectoryClient,
           times(knownGbids.length)).removeStale(ArgumentMatchers.<Callback<Void>> any(),
                                                 lastSeenCaptor.capture(),
                                                 gbidCaptor.capture());
    final long capturedLastSeenMs = lastSeenCaptor.getValue();
    assertTrue(capturedLastSeenMs <= beforeCallMs);
    assertTrue(beforeCallMs - capturedLastSeenMs <= toleranceMs);
    assertEquals(Arrays.asList(knownGbids), gbidCaptor.getAllValues());
}
// Verifies the retry behavior of removeStaleProvidersOfClusterController():
// as long as the GCD callback reports onFailure, the method re-triggers itself,
// so per backend it is invoked (numberOfOnFailureCalls + 1) times in total.
@Test
public void removeStaleProvidersOfClusterController_callsItselfOnCallbackFailure() {
// Test whether removeStaleProvidersOfClusterController() is calling itself n-times
// when callback function is calling onFailure(exception) function.
int numberOfOnFailureCalls = 2;
JoynrRuntimeException exception = new JoynrRuntimeException("removeStale failed");
for (String gbid : knownGbids) {
// Stateful answer: fail the first numberOfOnFailureCalls invocations,
// then report success on the final one.
doAnswer(new Answer<Future<Void>>() {
private int count = 0;
@Override
public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
Future<Void> result = new Future<Void>();
@SuppressWarnings("unchecked")
Callback<Void> callback = (Callback<Void>) invocation.getArguments()[0];
if (count++ == numberOfOnFailureCalls) {
callback.onSuccess(null);
result.onSuccess(null);
return result;
}
callback.onFailure(exception);
result.onSuccess(null);
return result;
}
}).when(globalCapabilitiesDirectoryClient)
.removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), eq(gbid));
}
localCapabilitiesDirectory.removeStaleProvidersOfClusterController();
int numberOfCalls = numberOfOnFailureCalls + 1; // one time success
for (String gbid : knownGbids) {
verify(globalCapabilitiesDirectoryClient,
times(numberOfCalls)).removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), eq(gbid));
}
}
/**
 * A JoynrMessageNotSentException carrying "Address type not supported" is treated
 * as non-retryable: removeStale() must be attempted exactly once per known backend
 * even though the callback reports a failure.
 */
@Test
public void removeStaleProvidersOfClusterController_calledOnceIfMessageNotSent() {
    JoynrRuntimeException nonRetryableException = new JoynrMessageNotSentException("Address type not supported");
    // Always answer with onFailure(nonRetryableException) for any gbid.
    doAnswer(invocation -> {
        Future<Void> future = new Future<Void>();
        Callback<Void> callback = invocation.getArgument(0);
        callback.onFailure(nonRetryableException);
        future.onSuccess(null);
        return future;
    }).when(globalCapabilitiesDirectoryClient)
      .removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), anyString());

    localCapabilitiesDirectory.removeStaleProvidersOfClusterController();

    for (String gbid : knownGbids) {
        verify(globalCapabilitiesDirectoryClient, times(1)).removeStale(ArgumentMatchers.<Callback<Void>> any(),
                                                                        anyLong(),
                                                                        eq(gbid));
    }
}
// Verifies that removeStale is NOT retried once the maximum retry duration
// (measured from cluster controller start-up) has been exceeded: despite the
// callback always failing, each backend sees exactly one removeStale call, and
// the maxLastSeenDate forwarded to the GCD equals the CC start-up timestamp.
@Test
public void removeStaleProvidersOfClusterController_noRetryIfRetryDurationExceeded() {
final long removeStaleMaxRetryMs = 3600000;
// Set a custom value of cluster controller start time to simulate timeout for removeStale retries
final long ccStartUpDateMs = removeStaleMaxRetryMs + 1;
try {
setFieldValue(localCapabilitiesDirectory, "ccStartUpDateInMs", ccStartUpDateMs);
} catch (Exception e) {
fail("Couldn't set start date of cluster controller in milliseconds.");
}
JoynrRuntimeException exception = new JoynrRuntimeException("removeStale failed");
for (String gbid : knownGbids) {
// Every invocation fails, which would normally cause a retry.
doAnswer(new Answer<Future<Void>>() {
@Override
public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
Future<Void> result = new Future<Void>();
@SuppressWarnings("unchecked")
Callback<Void> callback = (Callback<Void>) invocation.getArguments()[0];
callback.onFailure(exception);
result.onSuccess(null);
return result;
}
}).when(globalCapabilitiesDirectoryClient)
.removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), eq(gbid));
}
localCapabilitiesDirectory.removeStaleProvidersOfClusterController();
// Exactly one attempt per backend: the retry window has already expired.
for (String gbid : knownGbids) {
verify(globalCapabilitiesDirectoryClient, times(1)).removeStale(ArgumentMatchers.<Callback<Void>> any(),
eq(ccStartUpDateMs),
eq(gbid));
}
}
// Verifies that the GCD task sequencer executes queued add and remove operations
// strictly in submission order: add(entry1), add(entry2), remove(entry2), remove(entry1).
@Test(timeout = TEST_TIMEOUT)
public void addAndRemoveAreCalledInOrder() throws InterruptedException {
final String participantId1 = "participantId1";
final String participantId2 = "participantId2";
DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
discoveryEntry1.getQos().setScope(ProviderScope.GLOBAL);
discoveryEntry1.setParticipantId(participantId1);
DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
discoveryEntry2.setParticipantId(participantId2);
GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
globalAddress1);
GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
globalAddress1);
final boolean awaitGlobalRegistration = true;
Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry1, awaitGlobalRegistration);
Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
checkPromiseSuccess(promiseAdd1, "add failed");
checkPromiseSuccess(promiseAdd2, "add failed");
// The two GCD add calls must arrive in submission order; the captured TTL must
// still be close to the default TTL (see checkRemainingTtl).
InOrder inOrder = inOrder(globalCapabilitiesDirectoryClient);
ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
inOrder.verify(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
remainingTtlCapture.capture(),
any(String[].class));
checkRemainingTtl(remainingTtlCapture);
inOrder.verify(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
remainingTtlCapture.capture(),
any(String[].class));
checkRemainingTtl(remainingTtlCapture);
// One latch count per expected GCD remove call.
CountDownLatch cdl = new CountDownLatch(2);
doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
anyString(),
any(String[].class));
// remove() looks the entries up in the local store first, so stub the lookups.
when(localDiscoveryEntryStoreMock.lookup(discoveryEntry2.getParticipantId(),
Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry2));
when(localDiscoveryEntryStoreMock.lookup(discoveryEntry1.getParticipantId(),
Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry1));
// Removal is requested in reverse order (entry2 first) and must be executed that way.
localCapabilitiesDirectory.remove(discoveryEntry2.getParticipantId());
localCapabilitiesDirectory.remove(discoveryEntry1.getParticipantId());
assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
inOrder.verify(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(participantId2),
any(String[].class));
inOrder.verify(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(participantId1),
any(String[].class));
}
/**
 * Mockito matcher that compares a captured {@link GlobalDiscoveryEntry} against an
 * expected one by participant ID and serialized address only; all other fields are
 * ignored.
 */
private static class GlobalDiscoveryEntryWithParticipantIdMatcher implements ArgumentMatcher<GlobalDiscoveryEntry> {
    private final GlobalDiscoveryEntry expected;

    private GlobalDiscoveryEntryWithParticipantIdMatcher(GlobalDiscoveryEntry expected) {
        this.expected = expected;
    }

    @Override
    public boolean matches(GlobalDiscoveryEntry argument) {
        assertNotNull(argument);
        // Bug fix: participant IDs are Strings and must be compared with equals(),
        // not == (reference comparison only worked for interned literals).
        // Objects.equals additionally tolerates null values on either side.
        return java.util.Objects.equals(expected.getParticipantId(), argument.getParticipantId())
                && java.util.Objects.equals(expected.getAddress(), argument.getAddress());
    }
}
/**
 * Overwrites the private "defaultTtlAddAndRemove" field of the directory via
 * reflection so a test can use a shorter TTL than MessagingQos.DEFAULT_TTL.
 */
private void setNewDefaultTtlAddAndRemove(long newTtlMs) throws ReflectiveOperationException {
    Field ttlField = LocalCapabilitiesDirectoryImpl.class.getDeclaredField("defaultTtlAddAndRemove");
    ttlField.setAccessible(true);
    ttlField.set(localCapabilitiesDirectory, newTtlMs);
}
// Verifies that a queued GCD action whose TTL expires while an earlier action is
// still in flight is dropped (its promise rejected) instead of being sent: the
// delayed add of entry1 blocks the queue long enough for the add of entry2 to
// expire, while the remove of entry1 is still executed afterwards.
@Test(timeout = TEST_TIMEOUT)
public void testProcessingExpiredQueuedGcdActions() throws Exception {
reset(globalCapabilitiesDirectoryClient);
// defaultTtlAddAndRemove = 60000ms (MessagingQos.DEFAULT_TTL) is too long, we reduce it to 1000ms for the test
setNewDefaultTtlAddAndRemove(1000);
final String participantId1 = "participantId1";
final String participantId2 = "participantId2";
DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
discoveryEntry1.getQos().setScope(ProviderScope.GLOBAL);
discoveryEntry1.setParticipantId(participantId1);
DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry1);
discoveryEntry2.setParticipantId(participantId2);
GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
globalAddress1);
GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
globalAddress1);
// delay (1500ms) > reduced TTL (1000ms): entry2's queued add expires while
// entry1's add is still being processed.
final long delay = 1500;
CountDownLatch cdlAddDelayStarted = new CountDownLatch(1);
CountDownLatch cdlAddDone = new CountDownLatch(1);
doAnswer(createAnswerWithDelayedSuccess(cdlAddDelayStarted,
cdlAddDone,
delay)).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
anyLong(),
any(String[].class));
CountDownLatch cdlRemove = new CountDownLatch(1);
doAnswer(createAnswerWithSuccess(cdlRemove)).when(globalCapabilitiesDirectoryClient)
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(participantId1),
any(String[].class));
// 3 actions. 2 lcd.add and 1 lcd.remove
final Boolean awaitGlobalRegistration = true;
Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry1, awaitGlobalRegistration);
Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
localCapabilitiesDirectory.remove(discoveryEntry1.getParticipantId());
assertTrue(cdlAddDelayStarted.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
// The expired add of entry2 must be rejected with this exception text.
JoynrRuntimeException expectedException = new JoynrRuntimeException("Failed to process global registration in time, please try again");
checkPromiseException(promiseAdd2, new ProviderRuntimeException(expectedException.toString()));
// second add failed before first add has finished, remove not yet executed
assertEquals(1, cdlAddDone.getCount());
assertEquals(1, cdlRemove.getCount());
checkPromiseSuccess(promiseAdd1, "add failed");
assertTrue(cdlAddDone.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
assertTrue(cdlRemove.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
// entry1's add and remove ran in order; entry2's add was never sent to the GCD.
InOrder inOrder = inOrder(globalCapabilitiesDirectoryClient);
inOrder.verify(globalCapabilitiesDirectoryClient, times(1))
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
anyLong(),
any(String[].class));
inOrder.verify(globalCapabilitiesDirectoryClient, times(1))
.remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
eq(participantId1),
any(String[].class));
verify(globalCapabilitiesDirectoryClient,
times(0)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
anyLong(),
any(String[].class));
}
// Verifies the periodic re-add task: all globally registered entries from the local
// store are re-added to the GCD, each with the GBIDs it was originally registered
// for and with the default add/remove TTL.
@Test(timeout = TEST_TIMEOUT)
public void testReAddAllGlobalDiscoveryEntriesPeriodically() throws InterruptedException {
final String participantId1 = "participantId1";
final String participantId2 = "participantId2";
DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
discoveryEntry1.getQos().setScope(ProviderScope.GLOBAL);
discoveryEntry1.setParticipantId(participantId1);
DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry1);
discoveryEntry2.setParticipantId(participantId2);
GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
globalAddress1);
GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
globalAddress1);
final boolean awaitGlobalRegistration = true;
// Each entry is registered in a different single backend.
String[] gbids1 = new String[]{ knownGbids[0] };
String[] expectedGbids1 = gbids1.clone();
String[] gbids2 = new String[]{ knownGbids[1] };
String[] expectedGbids2 = gbids2.clone();
Promise<Add1Deferred> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry1,
awaitGlobalRegistration,
gbids1);
Promise<Add1Deferred> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry2,
awaitGlobalRegistration,
gbids2);
checkPromiseSuccess(promiseAdd1, "add failed");
checkPromiseSuccess(promiseAdd2, "add failed");
// Forget the initial add calls so only the re-add calls are verified below.
reset(globalCapabilitiesDirectoryClient);
CountDownLatch cdlReAdd = new CountDownLatch(2);
doAnswer(createAnswerWithSuccess(cdlReAdd)).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
anyLong(),
eq(gbids1));
doAnswer(createAnswerWithSuccess(cdlReAdd)).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
anyLong(),
eq(gbids2));
// The re-add task reads all global entries from the local store.
Set<DiscoveryEntry> globalEntries = new HashSet<>();
globalEntries.add(discoveryEntry1);
globalEntries.add(discoveryEntry2);
when(localDiscoveryEntryStoreMock.getAllGlobalEntries()).thenReturn(globalEntries);
// No GCD add after the reset until the re-add runnable is executed.
verify(globalCapabilitiesDirectoryClient, times(0)).add(any(), any(), anyLong(), any());
verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
eq(RE_ADD_INTERVAL_DAYS),
eq(RE_ADD_INTERVAL_DAYS),
eq(TimeUnit.DAYS));
// capture the runnable and execute it to schedule the re-add task
Runnable runnable = runnableCaptor.getValue();
runnable.run();
assertTrue(cdlReAdd.await(defaultTtlAddAndRemove, TimeUnit.MILLISECONDS));
// check whether add method has been called for 2 non expired entries
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
eq(defaultTtlAddAndRemove),
eq(expectedGbids1));
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
eq(defaultTtlAddAndRemove),
eq(expectedGbids2));
}
}
// java/core/clustercontroller/src/test/java/io/joynr/capabilities/LocalCapabilitiesDirectoryTest.java
/*
* #%L
* %%
* Copyright (C) 2020 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.capabilities;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Captor;
import org.mockito.InOrder;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.joynr.capabilities.LocalCapabilitiesDirectoryImpl.GcdTaskSequencer;
import io.joynr.dispatching.Dispatcher;
import io.joynr.exceptions.JoynrCommunicationException;
import io.joynr.exceptions.JoynrException;
import io.joynr.exceptions.JoynrMessageNotSentException;
import io.joynr.exceptions.JoynrRuntimeException;
import io.joynr.exceptions.JoynrTimeoutException;
import io.joynr.messaging.MessagingQos;
import io.joynr.messaging.routing.RoutingTable;
import io.joynr.messaging.routing.TransportReadyListener;
import io.joynr.provider.AbstractDeferred;
import io.joynr.provider.DeferredVoid;
import io.joynr.provider.Promise;
import io.joynr.provider.PromiseListener;
import io.joynr.proxy.Callback;
import io.joynr.proxy.CallbackWithModeledError;
import io.joynr.proxy.Future;
import io.joynr.proxy.ProxyBuilderFactory;
import io.joynr.runtime.GlobalAddressProvider;
import io.joynr.runtime.JoynrRuntime;
import io.joynr.runtime.ShutdownNotifier;
import io.joynr.util.ObjectMapper;
import joynr.exceptions.ApplicationException;
import joynr.exceptions.ProviderRuntimeException;
import joynr.infrastructure.GlobalCapabilitiesDirectory;
import joynr.system.DiscoveryProvider.Add1Deferred;
import joynr.system.DiscoveryProvider.AddToAllDeferred;
import joynr.system.DiscoveryProvider.Lookup1Deferred;
import joynr.system.DiscoveryProvider.Lookup2Deferred;
import joynr.system.DiscoveryProvider.Lookup3Deferred;
import joynr.system.DiscoveryProvider.Lookup4Deferred;
import joynr.system.RoutingTypes.Address;
import joynr.system.RoutingTypes.MqttAddress;
import joynr.types.CustomParameter;
import joynr.types.DiscoveryEntry;
import joynr.types.DiscoveryEntryWithMetaInfo;
import joynr.types.DiscoveryError;
import joynr.types.DiscoveryQos;
import joynr.types.DiscoveryScope;
import joynr.types.GlobalDiscoveryEntry;
import joynr.types.ProviderQos;
import joynr.types.ProviderScope;
import joynr.types.Version;
@RunWith(MockitoJUnitRunner.class)
public class LocalCapabilitiesDirectoryTest {
private static final Logger logger = LoggerFactory.getLogger(LocalCapabilitiesDirectoryTest.class);
// Timing constants for the tests below.
private static final int TEST_TIMEOUT = 10000;
private static final int DEFAULT_WAIT_TIME_MS = 5000; // value should be shorter than TEST_TIMEOUT
private static final String INTERFACE_NAME = "interfaceName";
private static final String TEST_URL = "mqtt://testUrl:42";
private static final long ONE_DAY_IN_MS = 1 * 24 * 60 * 60 * 1000;
// Interval passed to the directory for the periodic freshness-update ("touch") task.
private static final long freshnessUpdateIntervalMs = 300;
private static final long DEFAULT_EXPIRY_TIME_MS = 3628800000l;
private static final long RE_ADD_INTERVAL_DAYS = 7l;
private static final long defaultTtlAddAndRemove = MessagingQos.DEFAULT_TTL;
// Class under test, created in setUp().
private LocalCapabilitiesDirectory localCapabilitiesDirectory;
// Backends the directory is configured with; index 0 acts as the default backend.
private String[] knownGbids = { "testDEFAULTgbid", "testgbid2", "testGbid" };
private Long expiryDateMs = System.currentTimeMillis() + ONE_DAY_IN_MS;
private String publicKeyId = "publicKeyId";
// Addresses and their JSON-serialized forms used to build global discovery entries.
private MqttAddress globalAddress1;
private String globalAddress1Serialized;
private MqttAddress globalAddress2;
private String globalAddress2Serialized;
private MqttAddress globalAddressWithoutGbid;
private String globalAddressWithoutGbidSerialized;
// Template discovery entries created in setUp(); the "expected" copies are used by
// the custom argument matchers.
private DiscoveryEntry discoveryEntry;
private DiscoveryEntry expectedDiscoveryEntry;
private GlobalDiscoveryEntry globalDiscoveryEntry;
private GlobalDiscoveryEntry expectedGlobalDiscoveryEntry;
private GlobalDiscoveryEntry provisionedGlobalDiscoveryEntry;
// Collaborators of the directory, all mocked.
@Mock
JoynrRuntime runtime;
@Mock
private GlobalCapabilitiesDirectoryClient globalCapabilitiesDirectoryClient;
@Mock
private ExpiredDiscoveryEntryCacheCleaner expiredDiscoveryEntryCacheCleaner;
@Mock
private RoutingTable routingTable;
@Mock
private Dispatcher dispatcher;
@Mock
private ProxyBuilderFactory proxyBuilderFactoryMock;
@Mock
private DiscoveryEntryStore<DiscoveryEntry> localDiscoveryEntryStoreMock;
@Mock
private DiscoveryEntryStore<GlobalDiscoveryEntry> globalDiscoveryEntryCacheMock;
@Mock
private GlobalAddressProvider globalAddressProvider;
@Mock
private CapabilitiesProvisioning capabilitiesProvisioning;
@Mock
private ScheduledExecutorService capabilitiesFreshnessUpdateExecutor;
@Mock
private ShutdownNotifier shutdownNotifier;
// Captors for arguments handed to the mocked executor / GCD client.
@Captor
private ArgumentCaptor<Collection<DiscoveryEntryWithMetaInfo>> capabilitiesCaptor;
@Captor
private ArgumentCaptor<Runnable> runnableCaptor;
@Captor
private ArgumentCaptor<GcdTaskSequencer> addRemoveQueueRunnableCaptor;
@Captor
ArgumentCaptor<CallbackWithModeledError<Void, DiscoveryError>> callbackCaptor;
// Task sequencer captured from the executor in setUp(), run on a dedicated worker thread.
private GcdTaskSequencer gcdTaskSequencerSpy;
private GcdTaskSequencer gcdTaskSequencer;
private Thread addRemoveWorker;
/**
 * Mockito matcher that compares a captured {@link DiscoveryEntry} against an
 * expected one, tolerating a lastSeenDateMs that was refreshed by the directory
 * (see discoveryEntriesMatchWithUpdatedLastSeenDate).
 */
private static class DiscoveryEntryWithUpdatedLastSeenDateMsMatcher implements ArgumentMatcher<DiscoveryEntry> {
    private DiscoveryEntry expected;

    private DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(DiscoveryEntry expected) {
        this.expected = expected;
    }

    @Override
    public boolean matches(DiscoveryEntry argument) {
        assertNotNull(argument);
        return discoveryEntriesMatchWithUpdatedLastSeenDate(expected, argument);
    }

    @Override
    public String toString() {
        return "expected: " + expected;
    }
}
/**
 * Mockito matcher that compares a captured {@link GlobalDiscoveryEntry} against an
 * expected one, tolerating a refreshed lastSeenDateMs
 * (see globalDiscoveryEntriesMatchWithUpdatedLastSeenDate).
 */
private static class GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher
        implements ArgumentMatcher<GlobalDiscoveryEntry> {
    private GlobalDiscoveryEntry expected;

    private GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(GlobalDiscoveryEntry expected) {
        this.expected = expected;
    }

    @Override
    public boolean matches(GlobalDiscoveryEntry argument) {
        assertNotNull(argument);
        return globalDiscoveryEntriesMatchWithUpdatedLastSeenDate(expected, argument);
    }
}
/**
 * Compares two DiscoveryEntries field by field, accepting an actual lastSeenDateMs
 * that is equal to or at most 1000 ms newer than the expected one (the directory
 * refreshes lastSeenDateMs when an entry is added).
 *
 * Bug fix: domain, interfaceName, participantId and publicKeyId are Strings and
 * were compared with == (reference identity); expiryDateMs may be a boxed Long,
 * where == also compares references. Use null-safe Objects.equals() instead.
 */
private static boolean discoveryEntriesMatchWithUpdatedLastSeenDate(DiscoveryEntry expected,
                                                                    DiscoveryEntry actual) {
    return java.util.Objects.equals(expected.getDomain(), actual.getDomain())
            && java.util.Objects.equals(expected.getExpiryDateMs(), actual.getExpiryDateMs())
            && java.util.Objects.equals(expected.getInterfaceName(), actual.getInterfaceName())
            && java.util.Objects.equals(expected.getParticipantId(), actual.getParticipantId())
            && expected.getProviderVersion().equals(actual.getProviderVersion())
            && java.util.Objects.equals(expected.getPublicKeyId(), actual.getPublicKeyId())
            && expected.getQos().equals(actual.getQos())
            && expected.getLastSeenDateMs() <= actual.getLastSeenDateMs()
            && (expected.getLastSeenDateMs() + 1000) >= actual.getLastSeenDateMs();
}
/**
 * Same comparison as for plain DiscoveryEntries (lastSeenDateMs may be refreshed),
 * plus the isLocal meta-info flag.
 */
private static boolean discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(DiscoveryEntryWithMetaInfo expected,
                                                                                DiscoveryEntryWithMetaInfo actual) {
    if (!discoveryEntriesMatchWithUpdatedLastSeenDate(expected, actual)) {
        return false;
    }
    return expected.getIsLocal() == actual.getIsLocal();
}
/**
 * Same comparison as for plain DiscoveryEntries (lastSeenDateMs may be refreshed),
 * plus the serialized global address.
 */
private static boolean globalDiscoveryEntriesMatchWithUpdatedLastSeenDate(GlobalDiscoveryEntry expected,
                                                                          GlobalDiscoveryEntry actual) {
    if (!discoveryEntriesMatchWithUpdatedLastSeenDate(expected, actual)) {
        return false;
    }
    return expected.getAddress().equals(actual.getAddress());
}
/**
 * Looks up a declared (possibly private) field by name, failing the test
 * if the field does not exist.
 */
private Field getPrivateField(Class<?> privateClass, String fieldName) {
    try {
        return privateClass.getDeclaredField(fieldName);
    } catch (Exception e) {
        fail(e.getMessage());
        return null; // unreachable: fail() always throws
    }
}
/**
 * Sets a (possibly private) field of the given object via reflection;
 * fails the test if the field cannot be found.
 */
private <T> void setFieldValue(Object target, String fieldName, T value) throws IllegalArgumentException,
        IllegalAccessException {
    Field field = getPrivateField(target.getClass(), fieldName);
    assertNotNull(field);
    field.setAccessible(true);
    field.set(target, value);
}
// Builds the class under test with all collaborators mocked, captures the GCD task
// sequencer scheduled in the constructor and runs it on a dedicated worker thread,
// and prepares the template discovery entries used by the individual tests.
@Before
public void setUp() throws Exception {
ObjectMapper objectMapper = new ObjectMapper();
// Addresses per backend plus their serialized forms (as stored in global entries).
globalAddress1 = new MqttAddress(knownGbids[0], "testTopic");
globalAddress1Serialized = objectMapper.writeValueAsString(globalAddress1);
globalAddress2 = new MqttAddress(knownGbids[1], "testTopic");
globalAddress2Serialized = objectMapper.writeValueAsString(globalAddress2);
globalAddressWithoutGbid = new MqttAddress("brokerUri", "testTopic");
globalAddressWithoutGbidSerialized = objectMapper.writeValueAsString(globalAddressWithoutGbid);
// Inject the mapper into the static CapabilityUtils helper via reflection.
Field objectMapperField = CapabilityUtils.class.getDeclaredField("objectMapper");
objectMapperField.setAccessible(true);
objectMapperField.set(CapabilityUtils.class, objectMapper);
// By default every GCD add succeeds immediately.
doAnswer(createAnswerWithSuccess()).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(GlobalDiscoveryEntry.class),
anyLong(),
ArgumentMatchers.<String[]> any());
// Provisioned entries: the GCD itself plus one generic provisioned provider.
String discoveryDirectoriesDomain = "io.joynr";
String capabilitiesDirectoryParticipantId = "capDir_participantId";
String capabiltitiesDirectoryTopic = "dirTopic";
GlobalDiscoveryEntry globalCapabilitiesDirectoryDiscoveryEntry = CapabilityUtils.newGlobalDiscoveryEntry(new Version(0,
1),
discoveryDirectoriesDomain,
GlobalCapabilitiesDirectory.INTERFACE_NAME,
capabilitiesDirectoryParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
"provisionedPublicKey",
new MqttAddress(TEST_URL,
capabiltitiesDirectoryTopic));
provisionedGlobalDiscoveryEntry = CapabilityUtils.newGlobalDiscoveryEntry(new Version(0, 1),
"provisioneddomain",
"provisionedInterface",
"provisionedParticipantId",
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
"provisionedPublicKey",
new MqttAddress("provisionedbrokeruri",
"provisionedtopic"));
when(capabilitiesProvisioning.getDiscoveryEntries()).thenReturn(new HashSet<GlobalDiscoveryEntry>(Arrays.asList(globalCapabilitiesDirectoryDiscoveryEntry,
provisionedGlobalDiscoveryEntry)));
localCapabilitiesDirectory = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
globalAddressProvider,
localDiscoveryEntryStoreMock,
globalDiscoveryEntryCacheMock,
routingTable,
globalCapabilitiesDirectoryClient,
expiredDiscoveryEntryCacheCleaner,
freshnessUpdateIntervalMs,
capabilitiesFreshnessUpdateExecutor,
shutdownNotifier,
knownGbids,
DEFAULT_EXPIRY_TIME_MS);
// The constructor schedules the GcdTaskSequencer on the (mocked) executor;
// capture it and run it on a real worker thread so queued tasks are processed.
verify(capabilitiesFreshnessUpdateExecutor).schedule(addRemoveQueueRunnableCaptor.capture(),
anyLong(),
eq(TimeUnit.MILLISECONDS));
gcdTaskSequencer = addRemoveQueueRunnableCaptor.getValue();
gcdTaskSequencerSpy = Mockito.spy(gcdTaskSequencer);
addRemoveWorker = new Thread(gcdTaskSequencer);
addRemoveWorker.start();
// Template entry used (and copied) by most tests.
ProviderQos providerQos = new ProviderQos();
CustomParameter[] parameterList = { new CustomParameter("key1", "value1"),
new CustomParameter("key2", "value2") };
providerQos.setCustomParameters(parameterList);
String participantId = "testParticipantId";
String domain = "domain";
discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain,
INTERFACE_NAME,
participantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
expectedDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
globalDiscoveryEntry = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry, globalAddress1);
expectedGlobalDiscoveryEntry = new GlobalDiscoveryEntry(globalDiscoveryEntry);
// Default stubs: address available, stores empty.
when(globalAddressProvider.get()).thenReturn(globalAddress1);
when(localDiscoveryEntryStoreMock.lookup(anyString(), anyLong())).thenReturn(Optional.empty());
when(globalDiscoveryEntryCacheMock.lookup(anyString(), anyLong())).thenReturn(Optional.empty());
}
// Stops the GCD task sequencer and waits for its worker thread to terminate,
// so no queued add/remove task leaks into the next test. Order matters:
// stop() makes the sequencer loop exit, join() waits for that exit.
@After
public void tearDown() throws Exception {
gcdTaskSequencer.stop();
addRemoveWorker.join();
}
// Verifies that the constructor registered both the global cache and the local
// store with the expired-entry cache cleaner.
// NOTE(review): method name contains a typo ("Initializen"); left unchanged since
// the test name is part of the externally visible test suite.
@Test(timeout = TEST_TIMEOUT)
public void testExpiredDiscoveryEntryCacheCleanerIsInitializenCorrectly() {
verify(expiredDiscoveryEntryCacheCleaner).scheduleCleanUpForCaches(Mockito.<ExpiredDiscoveryEntryCacheCleaner.CleanupAction> any(),
eq(globalDiscoveryEntryCacheMock),
eq(localDiscoveryEntryStoreMock));
}
// Shared verification for the add* tests: the promise resolves, the GCD client
// received the expected global entry with the expected GBIDs and a TTL close to
// the default, the entry was stored locally, and the global cache was NOT touched.
private void checkAddGlobal_invokesLocalStoreAndGcd(Promise<? extends AbstractDeferred> promise,
String[] expectedGbids) throws InterruptedException {
ArgumentCaptor<GlobalDiscoveryEntry> argumentCaptor = ArgumentCaptor.forClass(GlobalDiscoveryEntry.class);
ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
checkPromiseSuccess(promise, "add failed");
verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argumentCaptor.capture(),
remainingTtlCapture.capture(),
eq(expectedGbids));
GlobalDiscoveryEntry capturedGlobalDiscoveryEntry = argumentCaptor.getValue();
assertNotNull(capturedGlobalDiscoveryEntry);
checkRemainingTtl(remainingTtlCapture);
// lastSeenDateMs is refreshed during add, so compare with tolerance.
assertTrue(globalDiscoveryEntriesMatchWithUpdatedLastSeenDate(expectedGlobalDiscoveryEntry,
capturedGlobalDiscoveryEntry));
verify(localDiscoveryEntryStoreMock).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
}
/**
 * Asserts that the TTL forwarded to the GCD does not exceed the default TTL and,
 * since local processing is fast, is still above half of it.
 */
private void checkRemainingTtl(ArgumentCaptor<Long> ttlCaptor) {
    final long capturedTtl = ttlCaptor.getValue();
    assertTrue(capturedTtl <= MessagingQos.DEFAULT_TTL);
    assertTrue(capturedTtl > (MessagingQos.DEFAULT_TTL / 2.0));
}
/** add() without explicit GBIDs must register the entry in all known backends. */
@Test(timeout = TEST_TIMEOUT)
public void add_global_invokesGcdAndStore() throws InterruptedException {
    Promise<DeferredVoid> addPromise = localCapabilitiesDirectory.add(discoveryEntry, true);
    checkAddGlobal_invokesLocalStoreAndGcd(addPromise, knownGbids);
}
/**
 * add with a single non-default GBID registers the provider only in that
 * backend, invoking the GCD client and the local store.
 */
@Test(timeout = TEST_TIMEOUT)
public void addWithGbids_global_singleNonDefaultGbid_invokesGcdAndStore() throws InterruptedException {
    String[] gbids = new String[]{ knownGbids[1] };
    // clone because the passed array must not be modified by the directory
    String[] expectedGbids = gbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    checkAddGlobal_invokesLocalStoreAndGcd(promise, expectedGbids);
}
/**
 * add with multiple GBIDs forwards them to the GCD client in the order given
 * by the caller, not in the order of the known GBIDs.
 */
@Test(timeout = TEST_TIMEOUT)
public void addWithGbids_global_multipleGbids_invokesGcdAndStore() throws InterruptedException {
    // expectedGbids element order intentionally differs from knownGbids element order
    String[] gbids = new String[]{ knownGbids[1], knownGbids[0] };
    String[] expectedGbids = gbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    checkAddGlobal_invokesLocalStoreAndGcd(promise, expectedGbids);
}
/**
 * add with an empty GBID array falls back to registering the provider in all
 * known backends.
 */
@Test(timeout = TEST_TIMEOUT)
public void addWithGbids_global_emptyGbidArray_addsToKnownBackends() throws InterruptedException {
    final boolean awaitGlobalRegistration = true;
    String[] gbids = new String[0];
    String[] expectedGbids = knownGbids;
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    checkAddGlobal_invokesLocalStoreAndGcd(promise, expectedGbids);
}
/**
 * addToAll registers the provider in all known backends, invoking the GCD
 * client and the local store.
 */
@Test(timeout = TEST_TIMEOUT)
public void addToAll_global_invokesGcdAndStore() throws InterruptedException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    checkAddGlobal_invokesLocalStoreAndGcd(promise, expectedGbids);
}
/**
 * Regression test for the GcdTaskSequencer: when taskFinished() is called
 * while an add task is still being processed, a later exception or callback
 * from that task must not crash the sequencer thread — it must keep
 * processing subsequently queued tasks, and the stale task's callback must
 * never be invoked.
 *
 * The test blocks the sequencer thread inside getProviderVersion() of a
 * custom GlobalDiscoveryEntry subclass, calls taskFinished() manually, then
 * unblocks the thread and checks that a second add still reaches the GCD.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerDoesNotCrashOnExceptionAfterAddTaskFinished() throws InterruptedException,
                                                                       IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    // cdl1: signals that the sequencer thread entered getProviderVersion()
    CountDownLatch cdl1 = new CountDownLatch(1);
    // cdl2: releases the sequencer thread after taskFinished() was called
    CountDownLatch cdl2 = new CountDownLatch(1);
    @SuppressWarnings("serial")
    class TestGde extends GlobalDiscoveryEntry {
        TestGde(GlobalDiscoveryEntry gde) {
            super(gde);
        }

        @Override
        public Version getProviderVersion() {
            cdl1.countDown();
            try {
                // block GcdTaskSequencer until taskFinished has been called
                cdl2.await();
            } catch (InterruptedException e) {
                // ignore
            }
            return super.getProviderVersion();
        }
    }
    AtomicBoolean cbCalled = new AtomicBoolean();
    TestGde gde = new TestGde(globalDiscoveryEntry);
    // callback records any invocation; it must never be called for the stale task
    GcdTask.CallbackCreator callbackCreator = new GcdTask.CallbackCreator() {
        @Override
        public CallbackWithModeledError<Void, DiscoveryError> createCallback() {
            return new CallbackWithModeledError<Void, DiscoveryError>() {
                @Override
                public void onFailure(DiscoveryError errorEnum) {
                    // taskFinished is called manually
                    logger.error("onFailure callback called, DiscoveryError {}", errorEnum);
                    cbCalled.set(true);
                }

                @Override
                public void onFailure(JoynrRuntimeException runtimeException) {
                    // taskFinished is called manually
                    logger.error("onFailure callback called:", runtimeException);
                    cbCalled.set(true);
                }

                @Override
                public void onSuccess(Void result) {
                    // taskFinished is called manually
                    logger.error("onSuccess callback called");
                    cbCalled.set(true);
                }
            };
        }
    };
    GcdTask task = GcdTask.createAddTask(callbackCreator, gde, expiryDateMs, knownGbids, true);
    gcdTaskSequencer.addTask(task);
    assertTrue(cdl1.await(DEFAULT_WAIT_TIME_MS * 100, TimeUnit.MILLISECONDS));
    // call taskFinished while task is processed
    gcdTaskSequencer.taskFinished();
    cdl2.countDown();
    verify(globalCapabilitiesDirectoryClient,
           timeout(1000).times(1)).add(any(),
                                       argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                                       anyLong(),
                                       eq(expectedGbids));
    // check that GcdTaskSequencer is still alive
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    verify(globalCapabilitiesDirectoryClient, timeout(1000).times(2)).add(any(),
                                                                          any(),
                                                                          anyLong(),
                                                                          eq(expectedGbids));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
    assertFalse(cbCalled.get());
}
/**
 * After an add task succeeded, the GcdTaskSequencer must be released exactly
 * once (single taskFinished()); any further callback invocations on the same
 * add callback — success, runtime exception, timeout or DiscoveryError —
 * must be ignored and trigger no additional sequencer interaction.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterAddSuccess() throws InterruptedException, IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    // replace the sequencer with a spy so taskFinished()/addTask() can be verified
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS * 100, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(callbackCaptor.capture(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         anyLong(),
                         eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // first onSuccess releases the sequencer once
    callbackCaptor.getValue().onSuccess(null);
    verify(gcdTaskSequencerSpy).taskFinished();
    // all further invocations on the same callback must be no-ops
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * With awaitGlobalRegistration=true, retries are disabled: a JoynrTimeoutException
 * must release the sequencer exactly once via taskFinished() (no retry), and any
 * further callback invocations must be ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterAddTimeoutOnDisabledRetry() throws InterruptedException,
                                                                     IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    // Retries are disabled when awaitGlobalRegistration is true
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(callbackCaptor.capture(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         anyLong(),
                         eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // timeout with disabled retry finishes the task instead of retrying it
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verify(gcdTaskSequencerSpy).taskFinished();
    // all further invocations on the same callback must be no-ops
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * With awaitGlobalRegistration=false, only a JoynrTimeoutException triggers a
 * retry of the add task (retryTask()); the retried task's callback then ends
 * the task on the first non-timeout failure (taskFinished()). All further
 * invocations on an already-consumed callback must be ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerRetriesAddOnJoynrTimeoutExceptionOnly() throws InterruptedException,
                                                                 IllegalAccessException {
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = false;
    reset(globalCapabilitiesDirectoryClient);
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    // semaphore released on every GCD add invocation (initial attempt and retry)
    Semaphore semaphore = new Semaphore(0);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            semaphore.release();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    assertTrue(semaphore.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).add(callbackCaptor.capture(),
                                                            any(),
                                                            anyLong(),
                                                            eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // timeout triggers a retry instead of finishing the task
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verify(gcdTaskSequencerSpy).retryTask();
    // further invocations on the consumed first callback must be no-ops
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    // wait for the retried add attempt
    assertTrue(semaphore.tryAcquire(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(2)).add(callbackCaptor.capture(),
                                                            any(),
                                                            anyLong(),
                                                            eq(expectedGbids));
    verify(gcdTaskSequencerSpy, never()).taskFinished();
    // non-timeout failure on the retried task finishes it
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    verify(gcdTaskSequencerSpy).taskFinished();
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * A modeled DiscoveryError from the GCD releases the sequencer exactly once
 * via taskFinished(); any further callback invocations on the same add
 * callback must be ignored.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterAddDiscoveryError() throws InterruptedException, IllegalAccessException {
    String[] expectedGbids = new String[]{ knownGbids[0] };
    final boolean awaitGlobalRegistration = true;
    reset(globalCapabilitiesDirectoryClient);
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).add(any(), any(), anyLong(), any());
    localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, expectedGbids);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).add(callbackCaptor.capture(),
                                                            any(),
                                                            anyLong(),
                                                            eq(expectedGbids));
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.ADD.equals(arg.getMode())));
    // DiscoveryError finishes the task once
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verify(gcdTaskSequencerSpy).taskFinished();
    // all further invocations on the same callback must be no-ops
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * A LOCAL-scoped provider is only added to the local discovery entry store;
 * neither the GCD client nor the global discovery entry cache is touched.
 */
@Test(timeout = TEST_TIMEOUT)
public void add_local_doesNotInvokeGcdAndCache() throws InterruptedException {
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    // NOTE(review): purpose of this sleep is unclear from the code — presumably
    // it separates lastSeenDateMs from setup time; confirm whether it is needed
    Thread.sleep(100);
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry);
    checkPromiseSuccess(promise, "add failed");
    verify(localDiscoveryEntryStoreMock).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalCapabilitiesDirectoryClient,
           never()).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                        any(GlobalDiscoveryEntry.class),
                        anyLong(),
                        ArgumentMatchers.<String[]> any());
    verify(globalDiscoveryEntryCacheMock, never()).add(ArgumentMatchers.<GlobalDiscoveryEntry> any());
}
/**
 * A second add for an already (successfully) registered provider must trigger
 * a new global registration with a refreshed lastSeenDateMs instead of being
 * skipped, even though the entry already exists in the local store.
 */
@Test(timeout = TEST_TIMEOUT)
public void addGlobalCapSucceeds_NextAddShallAddGlobalAgain() throws InterruptedException {
    final boolean awaitGlobalRegistration = true;
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    // first add: registers locally and globally
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         remainingTtlCapture.capture(),
                         ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCapture);
    Thread.sleep(1); // make sure that the lastSeenDate of expected entry 2 is larger than the lastSeenDateMs of expected entry 1
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(expectedGlobalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    verify(localDiscoveryEntryStoreMock,
           times(0)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    // simulate that the entry is already present in the local store
    doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(any(DiscoveryEntry.class));
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(anyString(), anyLong());
    // second add: must register globally again with the newer lastSeenDateMs
    Promise<DeferredVoid> promise2 = localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
    checkPromiseSuccess(promise2, "add failed");
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                         remainingTtlCapture.capture(),
                         ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCapture);
}
/**
 * When the global registration fails with a ProviderRuntimeException, the
 * entry must NOT be stored locally or cached, and a subsequent add for the
 * same provider must attempt the global registration again; once it succeeds,
 * the entry is stored locally.
 */
@Test(timeout = TEST_TIMEOUT)
public void addGlobalCapFails_NextAddShallAddGlobalAgain() throws InterruptedException {
    ProviderQos providerQos = new ProviderQos();
    providerQos.setScope(ProviderScope.GLOBAL);
    String participantId = LocalCapabilitiesDirectoryTest.class.getName() + ".addLocalAndThanGlobalShallWork";
    String domain = "testDomain";
    // local test-specific entries shadow the fields on purpose
    final DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
                                                             domain,
                                                             INTERFACE_NAME,
                                                             participantId,
                                                             providerQos,
                                                             System.currentTimeMillis(),
                                                             expiryDateMs,
                                                             publicKeyId);
    final DiscoveryEntry expectedDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
    globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
                                                    domain,
                                                    INTERFACE_NAME,
                                                    participantId,
                                                    providerQos,
                                                    System.currentTimeMillis(),
                                                    expiryDateMs,
                                                    publicKeyId,
                                                    globalAddress1Serialized);
    // first add: GCD client fails with an exception
    ProviderRuntimeException exception = new ProviderRuntimeException("add failed");
    doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
                                                      .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                           argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                           anyLong(),
                                                           ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseException(promise, new ProviderRuntimeException(exception.toString()));
    ArgumentCaptor<Long> remainingTtlCaptor = ArgumentCaptor.forClass(Long.class);
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                  remainingTtlCaptor.capture(),
                                                  ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCaptor);
    // failed global registration must not leave any local/cached state behind
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
    reset(globalCapabilitiesDirectoryClient, localDiscoveryEntryStoreMock);
    // second add: GCD client now succeeds
    doAnswer(createAnswerWithSuccess()).when(globalCapabilitiesDirectoryClient)
                                       .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                            argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                            anyLong(),
                                            ArgumentMatchers.<String[]> any());
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                                                  remainingTtlCaptor.capture(),
                                                  ArgumentMatchers.<String[]> any());
    checkRemainingTtl(remainingTtlCaptor);
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
/**
 * Helper: stubs the GCD client to report the given DiscoveryError and checks
 * that add(entry, awaitGlobalRegistration, gbids) rejects its promise with
 * exactly that error.
 *
 * @param expectedError the DiscoveryError the GCD client shall report
 */
private void testAddWithGbidsIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
    doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    String[] gbids = new String[]{ knownGbids[0] };
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    checkPromiseError(promise, expectedError);
}
/**
 * Helper: stubs the GCD client to report the given DiscoveryError and checks
 * that the GBID-less add(entry, awaitGlobalRegistration) rejects its promise
 * with a ProviderRuntimeException carrying that error.
 *
 * @param expectedError the DiscoveryError the GCD client shall report
 */
private void testAddIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
    doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseErrorInProviderRuntimeException(promise, expectedError);
}
/** GCD reports INVALID_GBID: add with GBIDs rejects with that error. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbidsIsProperlyRejected_invalidGbid() throws InterruptedException {
    testAddWithGbidsIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
/** GCD reports UNKNOWN_GBID: add with GBIDs rejects with that error. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbidsIsProperlyRejected_unknownGbid() throws InterruptedException {
    testAddWithGbidsIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
/** GCD reports INTERNAL_ERROR: add with GBIDs rejects with that error. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbidsIsProperlyRejected_internalError() throws InterruptedException {
    testAddWithGbidsIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
/** GCD reports INVALID_GBID: GBID-less add rejects with a ProviderRuntimeException. */
@Test(timeout = TEST_TIMEOUT)
public void testAddIsProperlyRejected_invalidGbid() throws InterruptedException {
    testAddIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
/** GCD reports UNKNOWN_GBID: GBID-less add rejects with a ProviderRuntimeException. */
@Test(timeout = TEST_TIMEOUT)
public void testAddIsProperlyRejected_unknownGbid() throws InterruptedException {
    testAddIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
/** GCD reports INTERNAL_ERROR: GBID-less add rejects with a ProviderRuntimeException. */
@Test(timeout = TEST_TIMEOUT)
public void testAddIsProperlyRejected_internalError() throws InterruptedException {
    testAddIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
/**
 * Helper: checks that add rejects invalid GBID arrays locally (no GCD
 * stubbing needed — validation happens before the GCD call) with the
 * expected DiscoveryError.
 *
 * @param gbids the (invalid) GBID array to pass to add
 * @param expectedError the DiscoveryError expected in the rejected promise
 */
private void testAddReturnsDiscoveryError(String[] gbids,
                                          DiscoveryError expectedError) throws InterruptedException {
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, true, gbids);
    checkPromiseError(promise, expectedError);
}
/** A GBID not contained in the known GBIDs is rejected with UNKNOWN_GBID. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_unknownGbid() throws InterruptedException {
    String[] gbids = new String[]{ knownGbids[1], "unknown" };
    testAddReturnsDiscoveryError(gbids, DiscoveryError.UNKNOWN_GBID);
}
/** An empty string in the GBID array is rejected with INVALID_GBID. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_emptyGbid() throws InterruptedException {
    String[] gbids = new String[]{ knownGbids[1], "" };
    testAddReturnsDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
/** A duplicated GBID in the array is rejected with INVALID_GBID. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_duplicateGbid() throws InterruptedException {
    String[] gbids = new String[]{ knownGbids[1], knownGbids[1] };
    testAddReturnsDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
/** A null element in the GBID array is rejected with INVALID_GBID. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_nullGbid() throws InterruptedException {
    String[] gbids = new String[]{ knownGbids[1], null };
    testAddReturnsDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
/** A null GBID array is rejected with INVALID_GBID. */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGbids_invalidGbid_nullGbidArray() throws InterruptedException {
    String[] gbids = null;
    testAddReturnsDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
/**
 * Without awaitGlobalRegistration, a JoynrTimeoutException from the GCD
 * triggers a retry of the global add, while the promise still resolves
 * immediately (fire-and-forget semantics for the caller).
 */
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_retryAfterTimeout() throws InterruptedException {
    // latch counts two GCD invocations: initial attempt plus at least one retry
    CountDownLatch cdl = new CountDownLatch(2);
    doAnswer(createVoidAnswerWithException(cdl,
                                           new JoynrTimeoutException(0))).when(globalCapabilitiesDirectoryClient)
                                                                         .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                              argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                              anyLong(),
                                                                              ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = false;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           atLeast(2)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                           argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                           anyLong(),
                           any());
    // promise resolves even though the global registration keeps failing
    checkPromiseSuccess(promise, "add failed");
}
/**
 * With awaitGlobalRegistration, a JoynrTimeoutException from the GCD is NOT
 * retried: the promise is rejected with a ProviderRuntimeException and no
 * local state is created or removed.
 */
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withAwaitGlobalRegistration_noRetryAfterTimeout() throws InterruptedException {
    JoynrTimeoutException timeoutException = new JoynrTimeoutException(0);
    ProviderRuntimeException expectedException = new ProviderRuntimeException(timeoutException.toString());
    doAnswer(createVoidAnswerWithException(timeoutException)).when(globalCapabilitiesDirectoryClient)
                                                             .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                  anyLong(),
                                                                  ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseException(promise, expectedException);
    // exactly one attempt, no retry
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
    verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
}
/**
 * Without awaitGlobalRegistration, a generic JoynrRuntimeException from the
 * GCD is NOT retried (only timeouts are); the promise still resolves and the
 * local entry is not removed.
 */
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_noRetryAfterRuntimeException() throws InterruptedException {
    JoynrRuntimeException runtimeException = new JoynrRuntimeException("custom runtime exception");
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createVoidAnswerWithException(cdl,
                                           runtimeException)).when(globalCapabilitiesDirectoryClient)
                                                             .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                  argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                  anyLong(),
                                                                  ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = false;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // exactly one attempt, no retry
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(eq(globalDiscoveryEntry.getParticipantId()));
    checkPromiseSuccess(promise, "add failed");
}
/**
 * Without awaitGlobalRegistration, a modeled DiscoveryError from the GCD is
 * NOT retried; the promise still resolves and the local entry is not removed.
 */
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_noRetryAfterDiscoveryError() throws InterruptedException {
    DiscoveryError expectedError = DiscoveryError.UNKNOWN_GBID;
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createVoidAnswerWithDiscoveryError(cdl,
                                                expectedError)).when(globalCapabilitiesDirectoryClient)
                                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                    argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                                                                    anyLong(),
                                                                    ArgumentMatchers.<String[]> any());
    final boolean awaitGlobalRegistration = false;
    Promise<DeferredVoid> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // exactly one attempt, no retry
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
                         anyLong(),
                         any());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(eq(globalDiscoveryEntry.getParticipantId()));
    checkPromiseSuccess(promise, "add failed");
}
/**
 * Registers two providers globally and verifies the TTL forwarded to the GCD
 * client. With awaitGlobalRegistration=true the add tasks are processed
 * sequentially, so the second call's remaining TTL must be reduced by roughly
 * the time the first (artificially delayed) call took. Without it, the full
 * default message TTL is used for every call.
 *
 * Fix: the assertEquals calls in the non-awaiting branch had the arguments
 * swapped (actual passed as expected), which produced misleading failure
 * messages; JUnit's contract is assertEquals(expected, actual).
 *
 * @param awaitGlobalRegistration whether add shall wait for the global registration to finish
 */
private void globalAddUsesCorrectRemainingTtl(boolean awaitGlobalRegistration) throws InterruptedException {
    int defaultTtl = MessagingQos.DEFAULT_TTL;

    DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry1.setParticipantId("participantId1");
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry2.setParticipantId("participantId2");
    GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
                                                                                                     globalAddress1);
    GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
                                                                                                     globalAddress1);
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);

    // first GCD add is delayed by sleepTime to create a measurable gap
    CountDownLatch startOfFirstAddCdl = new CountDownLatch(1);
    CountDownLatch endOfFirstAddCdl = new CountDownLatch(1);
    long sleepTime = 1000l;
    doAnswer(createAnswerWithDelayedSuccess(startOfFirstAddCdl,
                                            endOfFirstAddCdl,
                                            sleepTime)).when(globalCapabilitiesDirectoryClient)
                                                       .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                            argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry1)),
                                                            anyLong(),
                                                            ArgumentMatchers.<String[]> any());
    CountDownLatch secondAddCdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(secondAddCdl)).when(globalCapabilitiesDirectoryClient)
                                                   .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                        argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry2)),
                                                        anyLong(),
                                                        ArgumentMatchers.<String[]> any());

    localCapabilitiesDirectory.add(discoveryEntry1, awaitGlobalRegistration);
    localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
    assertTrue(startOfFirstAddCdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry1)),
                         remainingTtlCapture.capture(),
                         any());
    long firstNow = System.currentTimeMillis();
    long capturedFirstAddRemainingTtl = remainingTtlCapture.getValue();

    assertTrue(endOfFirstAddCdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    assertTrue(secondAddCdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry2)),
                         remainingTtlCapture.capture(),
                         any());
    long secondNow = System.currentTimeMillis();
    long delta = secondNow - firstNow;
    long capturedSecondAddRemainingTtl = remainingTtlCapture.getValue();

    // allow some scheduling jitter around the measured delta
    long epsilon = 300;
    if (awaitGlobalRegistration) {
        // sequential processing: second TTL is reduced by the first add's duration
        assertTrue(capturedFirstAddRemainingTtl <= defaultTtl);
        assertTrue(capturedFirstAddRemainingTtl > defaultTtl - epsilon);
        assertTrue(capturedSecondAddRemainingTtl <= defaultTtl - delta + epsilon);
        assertTrue(capturedSecondAddRemainingTtl > defaultTtl - delta - epsilon);
    } else {
        // fire-and-forget: both adds use the full default TTL
        assertEquals(defaultTtl, capturedFirstAddRemainingTtl);
        assertEquals(defaultTtl, capturedSecondAddRemainingTtl);
    }
}
/** Sequential (awaiting) add: second call's remaining TTL is reduced by the first call's duration. */
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withAwaitGlobalRegistration_usesCorrectRemainingTtl() throws InterruptedException {
    globalAddUsesCorrectRemainingTtl(true);
}
/** Non-awaiting add: every call uses the full default message TTL. */
@Test(timeout = TEST_TIMEOUT)
public void globalAdd_withoutAwaitGlobalRegistration_usesCorrectRemainingTtl() throws InterruptedException {
    globalAddUsesCorrectRemainingTtl(false);
}
/**
 * Adding the same provider twice for the same GBID must perform the global
 * registration again each time, with a refreshed lastSeenDateMs — even when
 * the entry is already present in the local store and the global cache.
 */
@Test(timeout = TEST_TIMEOUT)
public void addSameGbidTwiceInARow() throws InterruptedException {
    final boolean awaitGlobalRegistration = true;
    String[] gbids = new String[]{ knownGbids[0] };
    String[] expectedGbids = gbids.clone();
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                               argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                                               anyLong(),
                                               eq(expectedGbids));
    // first add
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids);
    checkPromiseSuccess(promise, "add failed");
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    ArgumentCaptor<Long> remainingTtlCaptor = ArgumentCaptor.forClass(Long.class);
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
                         remainingTtlCaptor.capture(),
                         eq(expectedGbids));
    checkRemainingTtl(remainingTtlCaptor);
    Thread.sleep(1); // make sure that the lastSeenDate of expected entry 2 is larger than the lastSeenDateMs of expected entry 1
    DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
    expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(expectedGlobalDiscoveryEntry);
    expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
    // simulate that the entry already exists locally and in the global cache
    doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(any(DiscoveryEntry.class));
    doReturn(Optional.of(globalDiscoveryEntry)).when(globalDiscoveryEntryCacheMock)
                                               .lookup(eq(expectedDiscoveryEntry.getParticipantId()), anyLong());
    // second add for the same GBID
    Promise<Add1Deferred> promise2 = localCapabilitiesDirectory.add(discoveryEntry2,
                                                                    awaitGlobalRegistration,
                                                                    gbids);
    checkPromiseSuccess(promise2, "add failed");
    // entry is added again (with newer lastSeenDateMs)
    verify(localDiscoveryEntryStoreMock,
           times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
    verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
                         remainingTtlCaptor.capture(),
                         eq(expectedGbids));
    checkRemainingTtl(remainingTtlCaptor);
}
/**
 * Registers the same provider twice in a row, each time for a different single
 * GBID, and verifies that:
 * - each add() stores the entry locally and triggers exactly one GCD add for
 *   the requested GBID (with a refreshed lastSeenDateMs and a checked TTL),
 * - afterwards a GLOBAL_ONLY lookup for either GBID is answered from the local
 *   store without calling the GCD again (provider is registered for both GBIDs).
 */
@Test(timeout = TEST_TIMEOUT)
public void addDifferentGbidsAfterEachOther() throws InterruptedException {
final boolean awaitGlobalRegistration = true;
String[] gbids1 = new String[]{ knownGbids[0] };
String[] expectedGbids1 = gbids1.clone();
String[] gbids2 = new String[]{ knownGbids[1] };
String[] expectedGbids2 = gbids2.clone();
DiscoveryEntryWithMetaInfo expectedEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
discoveryEntry);
DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
// let every GCD add succeed immediately, regardless of GBIDs
doAnswer(createAnswerWithSuccess()).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(GlobalDiscoveryEntry.class),
anyLong(),
ArgumentMatchers.<String[]> any());
// first add: GBID 1 only
Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration, gbids1);
checkPromiseSuccess(promise, "add failed");
verify(localDiscoveryEntryStoreMock,
times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
ArgumentCaptor<Long> remainingTtlCaptor = ArgumentCaptor.forClass(Long.class);
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
remainingTtlCaptor.capture(),
eq(expectedGbids1));
checkRemainingTtl(remainingTtlCaptor);
Thread.sleep(1); // make sure that the lastSeenDate of expected entry 2 is larger than the lastSeenDateMs of expected entry 1
DiscoveryEntry expectedDiscoveryEntry2 = new DiscoveryEntry(expectedDiscoveryEntry);
expectedDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
GlobalDiscoveryEntry expectedGlobalDiscoveryEntry2 = new GlobalDiscoveryEntry(expectedGlobalDiscoveryEntry);
expectedGlobalDiscoveryEntry2.setLastSeenDateMs(System.currentTimeMillis());
// entry 2 has not been stored yet at this point
verify(localDiscoveryEntryStoreMock,
times(0)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(any(DiscoveryEntry.class));
// second add: same provider, GBID 2 only
Promise<Add1Deferred> promise2 = localCapabilitiesDirectory.add(discoveryEntry2,
awaitGlobalRegistration,
gbids2);
checkPromiseSuccess(promise2, "add failed");
verify(localDiscoveryEntryStoreMock,
times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry2)));
verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
verify(globalCapabilitiesDirectoryClient,
times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry2)),
remainingTtlCaptor.capture(),
eq(expectedGbids2));
checkRemainingTtl(remainingTtlCaptor);
// provider is now registered for both GBIDs
doReturn(Arrays.asList(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookupGlobalEntries(eq(new String[]{
expectedDiscoveryEntry.getDomain() }), eq(INTERFACE_NAME));
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
// GLOBAL_ONLY lookups for each GBID must both find the locally registered global entry
Promise<Lookup2Deferred> promiseLookup1 = localCapabilitiesDirectory.lookup(new String[]{
expectedDiscoveryEntry.getDomain() }, expectedDiscoveryEntry.getInterfaceName(), discoveryQos, gbids1);
Promise<Lookup2Deferred> promiseLookup2 = localCapabilitiesDirectory.lookup(new String[]{
expectedDiscoveryEntry.getDomain() }, expectedDiscoveryEntry.getInterfaceName(), discoveryQos, gbids2);
DiscoveryEntryWithMetaInfo[] result1 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup1,
"lookup failed")[0];
assertEquals(1, result1.length);
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntryWithMetaInfo, result1[0]));
DiscoveryEntryWithMetaInfo[] result2 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup2,
"lookup failed")[0];
assertEquals(1, result2.length);
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntryWithMetaInfo, result2[0]));
// no GCD lookup expected: the provider was found in the local store
verify(globalCapabilitiesDirectoryClient,
times(0)).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
ArgumentMatchers.<String[]> any(),
anyString(),
anyLong(),
ArgumentMatchers.<String[]> any());
}
/**
 * Helper: verifies that add() removes a stale cached global entry with the same
 * participantId when a provider (of the given scope) is registered, and that the
 * GCD client is contacted only for GLOBAL scope.
 *
 * @param scope provider scope under test (LOCAL or GLOBAL)
 */
void checkAddRemovesCachedEntryWithSameParticipantId(ProviderScope scope) throws InterruptedException {
discoveryEntry.getQos().setScope(scope);
expectedDiscoveryEntry.getQos().setScope(scope);
// entry is not yet known locally, but a cached global entry with the same participantId exists
doReturn(false).when(localDiscoveryEntryStoreMock)
.hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
doReturn(Optional.of(globalDiscoveryEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(expectedDiscoveryEntry.getParticipantId()),
eq(Long.MAX_VALUE));
Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
checkPromiseSuccess(promise, "add failed");
verify(localDiscoveryEntryStoreMock).hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
verify(localDiscoveryEntryStoreMock, never()).lookup(any(), any());
verify(globalDiscoveryEntryCacheMock, times(1)).lookup(eq(expectedGlobalDiscoveryEntry.getParticipantId()),
eq(Long.MAX_VALUE));
// the stale cached entry must be removed
verify(globalDiscoveryEntryCacheMock, times(1)).remove(eq(expectedGlobalDiscoveryEntry.getParticipantId()));
// GCD add is expected only for globally scoped providers
int calls = (scope == ProviderScope.GLOBAL ? 1 : 0);
verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
verify(globalCapabilitiesDirectoryClient, times(calls)).add(any(), any(), anyLong(), any());
}
/** LOCAL scope: stale cached global entry is removed, no GCD add occurs. */
@Test(timeout = TEST_TIMEOUT)
public void add_removesCachedEntryWithSameParticipantId_ProviderScope_LOCAL() throws InterruptedException {
checkAddRemovesCachedEntryWithSameParticipantId(ProviderScope.LOCAL);
}
/** GLOBAL scope: stale cached global entry is removed and one GCD add occurs. */
@Test(timeout = TEST_TIMEOUT)
public void add_removesCachedEntryWithSameParticipantId_ProviderScope_GLOBAL() throws InterruptedException {
checkAddRemovesCachedEntryWithSameParticipantId(ProviderScope.GLOBAL);
}
/**
 * Adding a locally scoped entry that is already known (same entry in the local
 * store) must be a no-op: nothing is stored, cached, removed or sent to the GCD.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddKnownLocalEntryDoesNothing() throws InterruptedException {
discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
// local store already contains an identical entry
doReturn(true).when(localDiscoveryEntryStoreMock)
.hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
.lookup(eq(expectedDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(discoveryEntry, false, knownGbids);
checkPromiseSuccess(promise, "add failed");
verify(localDiscoveryEntryStoreMock).hasDiscoveryEntry(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
verify(localDiscoveryEntryStoreMock).lookup(eq(expectedDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
// no cache or GCD interaction for an unchanged, already known local entry
verify(globalDiscoveryEntryCacheMock, never()).lookup(anyString(), anyLong());
verify(globalDiscoveryEntryCacheMock, never()).remove(anyString());
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(globalCapabilitiesDirectoryClient, never()).add(any(), any(), anyLong(), any());
}
/**
 * A known local entry whose expiryDateMs differs from the stored one is NOT a
 * no-op: it must be added to the local store again (still without any GCD call).
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddKnownLocalEntryWithDifferentExpiryDateAddsAgain() throws InterruptedException {
// same entry, but with a later expiry date
DiscoveryEntry newDiscoveryEntry = new DiscoveryEntry(discoveryEntry);
newDiscoveryEntry.setExpiryDateMs(discoveryEntry.getExpiryDateMs() + 1);
newDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
doReturn(true).when(localDiscoveryEntryStoreMock).hasDiscoveryEntry(newDiscoveryEntry);
doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
.lookup(eq(newDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(newDiscoveryEntry, false, knownGbids);
checkPromiseSuccess(promise, "add failed");
verify(localDiscoveryEntryStoreMock).hasDiscoveryEntry(newDiscoveryEntry);
verify(localDiscoveryEntryStoreMock).lookup(eq(newDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
verify(localDiscoveryEntryStoreMock).add(eq(newDiscoveryEntry));
// check whether the local entry is in the global cache (unlikely). If so, then remove it
verify(globalDiscoveryEntryCacheMock, times(1)).lookup(anyString(), anyLong());
verify(globalDiscoveryEntryCacheMock, never()).remove(anyString());
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(globalCapabilitiesDirectoryClient, never()).add(any(), any(), anyLong(), any());
}
/**
 * If the global address provider throws (no global address available yet), add()
 * must register a transport-ready listener and defer the global registration:
 * neither the cache nor the GCD client may be called.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddWithGlobalAddressProviderThrowingException() throws InterruptedException {
when(globalAddressProvider.get()).thenThrow(new JoynrRuntimeException());
final boolean awaitGlobalRegistration = true;
localCapabilitiesDirectory.add(globalDiscoveryEntry, awaitGlobalRegistration, knownGbids);
// directory waits for the transport to become ready instead of failing
verify(globalAddressProvider).registerGlobalAddressesReadyListener((TransportReadyListener) localCapabilitiesDirectory);
verify(globalDiscoveryEntryCacheMock, times(0)).add(any(GlobalDiscoveryEntry.class));
verify(globalCapabilitiesDirectoryClient, times(0)).add(any(), any(), anyLong(), any());
}
/**
 * addToAll() with a globally scoped entry must store it locally and register it
 * at the GCD for all known GBIDs, with a valid remaining TTL.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAll() throws InterruptedException {
boolean awaitGlobalRegistration = true;
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
awaitGlobalRegistration);
ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
checkPromiseSuccess(promise, "addToAll failed");
verify(localDiscoveryEntryStoreMock,
times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
// GCD add is performed for ALL known GBIDs
verify(globalCapabilitiesDirectoryClient).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedGlobalDiscoveryEntry)),
remainingTtlCapture.capture(),
eq(knownGbids));
checkRemainingTtl(remainingTtlCapture);
}
/**
 * addToAll() with a locally scoped entry must only store it in the local store;
 * no cache entry and no GCD registration may be created.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAll_local() throws InterruptedException {
discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
expectedDiscoveryEntry.getQos().setScope(ProviderScope.LOCAL);
boolean awaitGlobalRegistration = true;
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
awaitGlobalRegistration);
checkPromiseSuccess(promise, "addToAll failed");
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(globalCapabilitiesDirectoryClient,
never()).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
any(GlobalDiscoveryEntry.class),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(localDiscoveryEntryStoreMock,
times(1)).add(argThat(new DiscoveryEntryWithUpdatedLastSeenDateMsMatcher(expectedDiscoveryEntry)));
}
/**
 * A JoynrRuntimeException from the GCD add must reject the addToAll() promise
 * with a ProviderRuntimeException and leave the local store untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_exception() throws InterruptedException {
JoynrRuntimeException exception = new JoynrRuntimeException("add failed");
ProviderRuntimeException expectedException = new ProviderRuntimeException(exception.toString());
doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry, true);
checkPromiseException(promise, expectedException);
// failed global registration must not modify the local store
verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
}
/**
 * Helper: simulates the given DiscoveryError from the GCD add and verifies that
 * addToAll() rejects with that error without touching the local store.
 *
 * @param expectedError the modeled error the GCD client reports
 */
private void testAddToAllIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
argThat(new GlobalDiscoveryEntryWithUpdatedLastSeenDateMsMatcher(globalDiscoveryEntry)),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry, true);
checkPromiseError(promise, expectedError);
// failed global registration must not modify the local store
verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
verify(localDiscoveryEntryStoreMock, times(0)).add(any(DiscoveryEntry.class));
}
/** INTERNAL_ERROR from the GCD must be propagated to the addToAll() caller. */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_internalError() throws InterruptedException {
    testAddToAllIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
/** INVALID_GBID from the GCD must be propagated to the addToAll() caller. */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_invalidGbid() throws InterruptedException {
    testAddToAllIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
/** UNKNOWN_GBID from the GCD must be propagated to the addToAll() caller. */
@Test(timeout = TEST_TIMEOUT)
public void testAddToAllIsProperlyRejected_unknownGbid() throws InterruptedException {
    testAddToAllIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
/**
 * Creates a Mockito Answer that completes a GCD lookup call: it invokes the
 * callback (first invocation argument) and resolves the returned Future with
 * the given list of global discovery entries.
 *
 * @param caps the entries to report as lookup result
 * @return an Answer producing an already-completed Future
 */
private static Answer<Future<List<GlobalDiscoveryEntry>>> createLookupAnswer(final List<GlobalDiscoveryEntry> caps) {
    // Answer is a functional interface; getArgument(0) infers the callback type
    // and avoids the unchecked cast of the former anonymous-class version.
    return invocation -> {
        Future<List<GlobalDiscoveryEntry>> result = new Future<List<GlobalDiscoveryEntry>>();
        Callback<List<GlobalDiscoveryEntry>> callback = invocation.getArgument(0);
        callback.onSuccess(caps);
        result.onSuccess(caps);
        return result;
    };
}
/**
 * Creates a Mockito Answer that completes a GCD lookup-by-participantId call:
 * it invokes the callback (first invocation argument) and resolves the returned
 * Future with the given global discovery entry.
 *
 * @param caps the entry to report as lookup result
 * @return an Answer producing an already-completed Future
 */
private static Answer<Future<GlobalDiscoveryEntry>> createLookupAnswer(final GlobalDiscoveryEntry caps) {
    // Lambda form of the former anonymous Answer; getArgument(0) removes the
    // unchecked cast.
    return invocation -> {
        Future<GlobalDiscoveryEntry> result = new Future<GlobalDiscoveryEntry>();
        Callback<GlobalDiscoveryEntry> callback = invocation.getArgument(0);
        callback.onSuccess(caps);
        result.onSuccess(caps);
        return result;
    };
}
/**
 * Creates a Mockito Answer that immediately reports success on the callback
 * passed as first invocation argument.
 *
 * @return an Answer invoking {@code Callback.onSuccess(null)}
 */
private static Answer<Void> createAnswerWithSuccess() {
    // Delegate with an already-released latch, mirroring the pattern used by
    // createVoidAnswerWithException / createVoidAnswerWithDiscoveryError.
    return createAnswerWithSuccess(new CountDownLatch(0));
}
/**
 * Creates a Mockito Answer that reports success on the callback passed as first
 * invocation argument and then counts down the given latch, allowing the test
 * to await the asynchronous completion.
 *
 * @param cdl latch counted down after the callback has been notified
 * @return an Answer invoking {@code Callback.onSuccess(null)}
 */
private static Answer<Void> createAnswerWithSuccess(CountDownLatch cdl) {
    // Lambda form of the former anonymous Answer; getArgument(0) removes the
    // unchecked cast.
    return invocation -> {
        Callback<Void> callback = invocation.getArgument(0);
        callback.onSuccess(null);
        cdl.countDown();
        return null;
    };
}
/**
 * Creates a Mockito Answer that reports success asynchronously after a delay:
 * a worker thread counts down {@code cdlStart}, sleeps {@code delay} ms, calls
 * {@code Callback.onSuccess(null)} and finally counts down {@code cdlDone}.
 *
 * @param cdlStart latch released as soon as the worker thread has started
 * @param cdlDone  latch released after the callback has been notified
 * @param delay    sleep time in milliseconds before reporting success
 * @return an Answer that returns immediately while completing in the background
 */
private static Answer<Void> createAnswerWithDelayedSuccess(CountDownLatch cdlStart,
                                                           CountDownLatch cdlDone,
                                                           long delay) {
    // Lambda form of the former anonymous Answer/Runnable pair;
    // getArgument(0) removes the unchecked cast.
    return invocation -> {
        Callback<Void> callback = invocation.getArgument(0);
        new Thread(() -> {
            cdlStart.countDown();
            try {
                Thread.sleep(delay);
            } catch (Exception e) {
                // fail the test instead of silently skipping the delayed success
                fail("SLEEP INTERRUPTED");
            }
            callback.onSuccess(null);
            cdlDone.countDown();
        }).start();
        return null;
    };
}
/**
 * Convenience overload: failure Answer without latch synchronization.
 *
 * @param exception the exception to report via {@code Callback.onFailure}
 */
private static Answer<Void> createVoidAnswerWithException(JoynrRuntimeException exception) {
    // An already-released latch makes the latched variant behave synchronously.
    return createVoidAnswerWithException(new CountDownLatch(0), exception);
}
/**
 * Convenience overload: DiscoveryError Answer without latch synchronization.
 *
 * @param error the modeled error to report via {@code onFailure}
 */
private static Answer<Void> createVoidAnswerWithDiscoveryError(DiscoveryError error) {
    // An already-released latch makes the latched variant behave synchronously.
    return createVoidAnswerWithDiscoveryError(new CountDownLatch(0), error);
}
/**
 * Creates a Mockito Answer that reports the given exception on the callback
 * passed as first invocation argument and then counts down the given latch.
 *
 * @param cdl       latch counted down after the callback has been notified
 * @param exception the exception to report via {@code Callback.onFailure}
 * @return an Answer invoking {@code Callback.onFailure(exception)}
 */
private static Answer<Void> createVoidAnswerWithException(CountDownLatch cdl, JoynrRuntimeException exception) {
    // Lambda form of the former anonymous Answer; getArgument(0) removes the
    // unchecked cast.
    return invocation -> {
        Callback<Void> callback = invocation.getArgument(0);
        callback.onFailure(exception);
        cdl.countDown();
        return null;
    };
}
/**
 * Creates a Mockito Answer that reports the given modeled DiscoveryError on the
 * callback passed as first invocation argument and then counts down the latch.
 *
 * @param cdl   latch counted down after the callback has been notified
 * @param error the modeled error to report via {@code onFailure}
 * @return an Answer invoking {@code CallbackWithModeledError.onFailure(error)}
 */
private static Answer<Void> createVoidAnswerWithDiscoveryError(CountDownLatch cdl, DiscoveryError error) {
    // Lambda form of the former anonymous Answer; getArgument(0) removes the
    // unchecked cast.
    return invocation -> {
        CallbackWithModeledError<Void, DiscoveryError> callback = invocation.getArgument(0);
        callback.onFailure(error);
        cdl.countDown();
        return null;
    };
}
/**
 * GLOBAL_ONLY lookup by domain/interface: local entries must never satisfy the
 * lookup; the GCD is queried unless a non-expired cached global entry exists.
 * Verifies GCD call counts across: empty result, presence of a local-only
 * provider, removal of that provider, a fresh global entry, a cache hit, and
 * cache expiry.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_globalOnly() throws InterruptedException {
List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String[] domains = new String[]{ domain1 };
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 1000L, DiscoveryScope.GLOBAL_ONLY, false);
// cache is empty, GCD returns no entries
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(new ArrayList<GlobalDiscoveryEntry>());
doAnswer(createLookupAnswer(caps)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
0);
verify(routingTable, never()).incrementReferenceCount(any());
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
"localParticipant",
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
final boolean awaitGlobalRegistration = true;
Promise<DeferredVoid> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
checkPromiseSuccess(promiseAdd, "add failed");
// GLOBAL_ONLY must ignore the local entry and query the GCD again
Promise<Lookup1Deferred> promise2 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(2,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise2,
0);
verify(routingTable, never()).incrementReferenceCount(any());
// even deleting local cap entries shall have no effect, the global cap dir shall be invoked
when(localDiscoveryEntryStoreMock.lookup(discoveryEntry.getParticipantId(),
Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry));
localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
verify(localDiscoveryEntryStoreMock).remove(discoveryEntry.getParticipantId());
Promise<Lookup1Deferred> promise3 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(3,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise3,
0);
verify(routingTable, never()).incrementReferenceCount(any());
// add global entry
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
globalParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
caps.add(capInfo);
doAnswer(createLookupAnswer(caps)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
Promise<Lookup1Deferred> promise4 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(4,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise4,
1); // 1 global entry
// the global provider's address must be added to the routing table (no refcounting for global entries)
verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
verify(routingTable, never()).incrementReferenceCount(any());
// now, another lookup call shall take the cached for the global cap call, and no longer call the global cap dir
// (as long as the cache is not expired)
reset((Object) globalDiscoveryEntryCacheMock);
reset((Object) routingTable);
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(Arrays.asList(capInfo));
Promise<Lookup1Deferred> promise5 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(4,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise5,
1); // 1 cached entry
verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
verify(routingTable, never()).incrementReferenceCount(any());
reset((Object) routingTable);
// and now, invalidate the existing cached global values, resulting in another call to globalcapclient
discoveryQos.setCacheMaxAge(0L);
Thread.sleep(1);
// now, another lookup call shall call the globalCapabilitiesDirectoryClient, as the global cap dir is expired
Promise<Lookup1Deferred> promise6 = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(5,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise6,
1); // 1 global entry
verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
verify(routingTable, never()).incrementReferenceCount(any());
reset(globalCapabilitiesDirectoryClient);
}
/**
 * Verifies that the GCD client's lookup has been invoked {@code gcdTimesCalled}
 * times in total with the given parameters (domains compared ignoring order) and
 * that the given promise resolved successfully with the expected number of
 * discovery entries.
 *
 * @param gcdTimesCalled         cumulative expected number of GCD lookup calls
 * @param numberOfReturnedValues expected length of the returned entry array
 * @return the resolved promise values; values[0] is a DiscoveryEntryWithMetaInfo[]
 */
private Object[] verifyGcdLookupAndPromiseFulfillment(int gcdTimesCalled,
String[] domains,
String interfaceName,
long discoveryTimeout,
String[] gbids,
Promise<?> promise,
int numberOfReturnedValues) throws InterruptedException {
verify(globalCapabilitiesDirectoryClient,
times(gcdTimesCalled)).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
eq(interfaceName),
eq(discoveryTimeout),
eq(gbids));
Object[] values = checkPromiseSuccess(promise, "Unexpected rejection in global lookup");
assertEquals(numberOfReturnedValues, ((DiscoveryEntryWithMetaInfo[]) values[0]).length);
return values;
}
/**
 * LOCAL_THEN_GLOBAL lookup by domain/interface: a matching local entry must
 * short-circuit the lookup (no GCD call, routing-table refcount incremented);
 * without a local entry the GCD or the cache is used and the global provider's
 * address is put into the routing table. Verifies GCD call counts and routing
 * table interactions across all phases.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_localThenGlobal() throws InterruptedException {
List<GlobalDiscoveryEntry> caps = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String[] domains = new String[]{ domain1 };
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 1000L, DiscoveryScope.LOCAL_THEN_GLOBAL, false);
doAnswer(createLookupAnswer(caps)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
// no local entry yet: falls through to the GCD (which returns nothing)
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
0);
verify(routingTable, never()).incrementReferenceCount(any());
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
String localParticipantId = "localParticipant";
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
localParticipantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
reset((Object) localDiscoveryEntryStoreMock);
when(localDiscoveryEntryStoreMock.lookup(eq(domains),
eq(interfaceName1))).thenReturn(Arrays.asList(discoveryEntry));
// local entry found: GCD call count stays at 1
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 local entry
verify(routingTable, times(1)).incrementReferenceCount(eq(localParticipantId));
// add global entry
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
globalParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
caps.add(capInfo);
// local entry still wins over the (now available) global one
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 local entry
verify(routingTable, times(2)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, never()).put(anyString(), any(Address.class), eq(true), anyLong());
// without local entry, the global cap dir is called
reset((Object) localDiscoveryEntryStoreMock);
when(localDiscoveryEntryStoreMock.lookup(anyString(), anyLong())).thenReturn(Optional.empty());
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(2,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 global entry
verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
// now, another lookup call shall take the cached for the global cap call, and no longer call the global cap dir
// (as long as the cache is not expired)
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(Arrays.asList(capInfo));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(2,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 cached entry
verify(routingTable, times(2)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
// and now, invalidate the existing cached global values, resulting in another call to globalcapclient
discoveryQos.setCacheMaxAge(0L);
Thread.sleep(1);
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(3,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 global entry
verify(routingTable, times(3)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
}
/**
 * Lookup by participantId with the default LOCAL_AND_GLOBAL scope: a locally
 * registered entry must be returned directly — no cache/GCD interaction, the
 * routing-table reference count is incremented and no address is put.
 */
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantId_defaultScopelocalAndGlobal_localEntry() throws InterruptedException {
DiscoveryQos discoveryQos = new DiscoveryQos(Long.MAX_VALUE,
Long.MAX_VALUE,
DiscoveryScope.LOCAL_AND_GLOBAL,
false);
DiscoveryEntryWithMetaInfo expectedDiscoveryEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
expectedDiscoveryEntry);
// add locally registered entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
when(localDiscoveryEntryStoreMock.lookup(eq(expectedDiscoveryEntry.getParticipantId()),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(Optional.of(discoveryEntry));
reset(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(expectedDiscoveryEntry.getParticipantId());
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo retrievedCapabilityEntry = (DiscoveryEntryWithMetaInfo) values[0];
assertEquals(expectedDiscoveryEntryWithMetaInfo, retrievedCapabilityEntry);
verify(localDiscoveryEntryStoreMock).lookup(eq(expectedDiscoveryEntry.getParticipantId()), eq(Long.MAX_VALUE));
// the local hit must not touch the global cache or the GCD client
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
verify(routingTable, times(1)).incrementReferenceCount(eq(discoveryEntry.getParticipantId()));
verify(routingTable, never()).put(eq(discoveryEntry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_localAndGlobal() throws InterruptedException {
List<GlobalDiscoveryEntry> globalEntries = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String[] domains = new String[]{ domain1 };
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(new String[]{ domain1 }),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
0);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
// add local entry
ProviderQos providerQos = new ProviderQos();
providerQos.setScope(ProviderScope.LOCAL);
String localParticipantId = "localParticipant";
DiscoveryEntry discoveryEntry = new DiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
localParticipantId,
providerQos,
System.currentTimeMillis(),
expiryDateMs,
publicKeyId);
when(localDiscoveryEntryStoreMock.lookup(eq(domains),
eq(interfaceName1))).thenReturn(Arrays.asList(discoveryEntry));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(2,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
1); // 1 local entry
verify(routingTable, times(1)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, never()).put(anyString(), any(Address.class), eq(true), anyLong());
// add global entry
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
domain1,
interfaceName1,
globalParticipantId,
new ProviderQos(),
System.currentTimeMillis(),
expiryDateMs,
publicKeyId,
globalAddress1Serialized);
globalEntries.add(capInfo);
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName1),
eq(discoveryQos.getDiscoveryTimeout()),
eq(knownGbids));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(3,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 1 local, 1 global entry
verify(routingTable, times(2)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, times(1)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
// now, another lookup call shall use the cached result of the previous global lookup, and no longer call the
// global capabilities directory (as long as the cache is not expired)
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
eq(interfaceName1),
eq(discoveryQos.getCacheMaxAge()))).thenReturn(Arrays.asList(capInfo));
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(3,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 1 local, 1 cached entry
verify(routingTable, times(3)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, times(2)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
// and now, invalidate the existing cached global values, resulting in another call to globalCapabilitiesDirectoryClient
discoveryQos.setCacheMaxAge(0L);
Thread.sleep(1);
promise = localCapabilitiesDirectory.lookup(domains, interfaceName1, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(4,
domains,
interfaceName1,
discoveryQos.getDiscoveryTimeout(),
knownGbids,
promise,
2); // 1 local, 1 global entry
verify(routingTable, times(4)).incrementReferenceCount(eq(localParticipantId));
verify(routingTable, times(3)).put(eq(globalParticipantId), any(Address.class), eq(true), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterface_emptyGbid_replacesReturnedGbidsWithEmpty() throws InterruptedException {
// Directory instance configured with a single empty GBID: addresses of providers returned
// by the global lookup must be rewritten so that their broker URI is that empty GBID.
String[] gbids = new String[]{ "" };
LocalCapabilitiesDirectoryImpl localCapabilitiesDirectoryWithEmptyGbids = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
                                                                                                            globalAddressProvider,
                                                                                                            localDiscoveryEntryStoreMock,
                                                                                                            globalDiscoveryEntryCacheMock,
                                                                                                            routingTable,
                                                                                                            globalCapabilitiesDirectoryClient,
                                                                                                            expiredDiscoveryEntryCacheCleaner,
                                                                                                            freshnessUpdateIntervalMs,
                                                                                                            capabilitiesFreshnessUpdateExecutor,
                                                                                                            shutdownNotifier,
                                                                                                            gbids,
                                                                                                            DEFAULT_EXPIRY_TIME_MS);
List<GlobalDiscoveryEntry> globalEntries = new ArrayList<GlobalDiscoveryEntry>();
String domain1 = "domain1";
String[] domains = new String[]{ domain1 };
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
// add global entries
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
                                                        domain1,
                                                        interfaceName1,
                                                        globalParticipantId,
                                                        new ProviderQos(),
                                                        System.currentTimeMillis(),
                                                        expiryDateMs,
                                                        publicKeyId,
                                                        globalAddress1Serialized);
globalEntries.add(capInfo);
String globalParticipantId2 = "globalParticipant2";
GlobalDiscoveryEntry capInfo2 = new GlobalDiscoveryEntry(new Version(47, 11),
                                                         domain1,
                                                         interfaceName1,
                                                         globalParticipantId2,
                                                         new ProviderQos(),
                                                         System.currentTimeMillis(),
                                                         expiryDateMs,
                                                         publicKeyId,
                                                         globalAddress1Serialized);
globalEntries.add(capInfo2);
// GCD client returns both entries for a lookup over the empty-GBID backend
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
                                           .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                   eq(new String[]{ domain1 }),
                                                   eq(interfaceName1),
                                                   eq(discoveryQos.getDiscoveryTimeout()),
                                                   eq(gbids));
Promise<Lookup2Deferred> promise = localCapabilitiesDirectoryWithEmptyGbids.lookup(domains,
                                                                                   interfaceName1,
                                                                                   discoveryQos,
                                                                                   new String[]{});
verifyGcdLookupAndPromiseFulfillment(1,
                                     domains,
                                     interfaceName1,
                                     discoveryQos.getDiscoveryTimeout(),
                                     gbids,
                                     promise,
                                     2);
// remote entries: routing table entries are created, no reference counting
verify(routingTable, never()).incrementReferenceCount(any());
ArgumentCaptor<Address> addressCaptor = ArgumentCaptor.forClass(Address.class);
verify(routingTable, times(1)).put(eq(globalParticipantId), addressCaptor.capture(), eq(true), anyLong());
// the broker URI of the stored address must have been replaced by the (empty) configured GBID
MqttAddress address = (MqttAddress) addressCaptor.getValue();
assertEquals(gbids[0], address.getBrokerUri());
verify(routingTable, times(1)).put(eq(globalParticipantId2), addressCaptor.capture(), eq(true), anyLong());
address = (MqttAddress) addressCaptor.getValue();
assertEquals(gbids[0], address.getBrokerUri());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantId_emptyGbid_replacesReturnedGbidsWithEmpty() throws InterruptedException {
// Same empty-GBID scenario as the domain/interface variant above, but for a
// lookup by participantId: the stored address must use the empty GBID as broker URI.
String[] gbids = new String[]{ "" };
LocalCapabilitiesDirectoryImpl localCapabilitiesDirectoryWithEmptyGbids = new LocalCapabilitiesDirectoryImpl(capabilitiesProvisioning,
                                                                                                            globalAddressProvider,
                                                                                                            localDiscoveryEntryStoreMock,
                                                                                                            globalDiscoveryEntryCacheMock,
                                                                                                            routingTable,
                                                                                                            globalCapabilitiesDirectoryClient,
                                                                                                            expiredDiscoveryEntryCacheCleaner,
                                                                                                            freshnessUpdateIntervalMs,
                                                                                                            capabilitiesFreshnessUpdateExecutor,
                                                                                                            shutdownNotifier,
                                                                                                            gbids,
                                                                                                            DEFAULT_EXPIRY_TIME_MS);
String domain1 = "domain1";
String interfaceName1 = "interfaceName1";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
// add global entry
String globalParticipantId = "globalParticipant";
GlobalDiscoveryEntry capInfo = new GlobalDiscoveryEntry(new Version(47, 11),
                                                        domain1,
                                                        interfaceName1,
                                                        globalParticipantId,
                                                        new ProviderQos(),
                                                        System.currentTimeMillis(),
                                                        expiryDateMs,
                                                        publicKeyId,
                                                        globalAddress1Serialized);
doAnswer(createLookupAnswer(capInfo)).when(globalCapabilitiesDirectoryClient)
                                     .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                             eq(globalParticipantId),
                                             eq(discoveryQos.getDiscoveryTimeout()),
                                             eq(gbids));
Promise<Lookup4Deferred> promise = localCapabilitiesDirectoryWithEmptyGbids.lookup(globalParticipantId,
                                                                                   discoveryQos,
                                                                                   new String[]{});
checkPromiseSuccess(promise, "lookup failed");
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                                 eq(globalParticipantId),
                                                 eq(discoveryQos.getDiscoveryTimeout()),
                                                 eq(gbids));
// remote entry: routing table entry is created, no reference counting
verify(routingTable, never()).incrementReferenceCount(any());
ArgumentCaptor<Address> addressCaptor = ArgumentCaptor.forClass(Address.class);
verify(routingTable, times(1)).put(eq(globalParticipantId), addressCaptor.capture(), eq(true), anyLong());
// the broker URI of the stored address must equal the (empty) configured GBID
MqttAddress address = (MqttAddress) addressCaptor.getValue();
assertEquals(gbids[0], address.getBrokerUri());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_globalOnly_filtersRemoteCachedEntriesByGbids() throws InterruptedException {
// Two remote entries are cached, one per backend (distinguished by their serialized
// address). A GLOBAL_ONLY lookup restricted to a single GBID must return only the
// entry that belongs to that backend and only add that one to the routing table.
String domain = "domain";
String[] domainsForLookup = new String[]{ domain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
GlobalDiscoveryEntry cachedEntryForGbid1 = new GlobalDiscoveryEntry(new Version(47, 11),
                                                                    domain,
                                                                    interfaceName,
                                                                    "participantId1",
                                                                    new ProviderQos(),
                                                                    System.currentTimeMillis(),
                                                                    expiryDateMs,
                                                                    publicKeyId,
                                                                    globalAddress1Serialized);
GlobalDiscoveryEntry cachedEntryForGbid2 = new GlobalDiscoveryEntry(cachedEntryForGbid1);
cachedEntryForGbid2.setParticipantId("participantId2");
cachedEntryForGbid2.setAddress(globalAddress2Serialized);
// both expected results are non-local (isLocal == false)
DiscoveryEntryWithMetaInfo expectedEntry1 = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                cachedEntryForGbid1);
DiscoveryEntryWithMetaInfo expectedEntry2 = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                cachedEntryForGbid2);
doReturn(Arrays.asList(cachedEntryForGbid1, cachedEntryForGbid2)).when(globalDiscoveryEntryCacheMock)
                                                                 .lookup(eq(domainsForLookup),
                                                                         eq(interfaceName),
                                                                         eq(discoveryQos.getCacheMaxAge()));
// lookup restricted to the second backend: only entry 2 may be returned/routed
Promise<Lookup2Deferred> promise1 = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                      interfaceName,
                                                                      discoveryQos,
                                                                      new String[]{ knownGbids[1] });
DiscoveryEntryWithMetaInfo[] result1 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise1,
                                                                                          "lookup failed")[0];
verify(routingTable, times(1)).put(eq(expectedEntry2.getParticipantId()),
                                   any(Address.class),
                                   eq(true),
                                   anyLong());
verify(routingTable, never()).put(eq(expectedEntry1.getParticipantId()),
                                  any(Address.class),
                                  eq(true),
                                  anyLong());
verify(routingTable, never()).incrementReferenceCount(anyString());
assertEquals(1, result1.length);
assertEquals(expectedEntry2, result1[0]);
// reset verification counts before the second, symmetric lookup
reset((Object) routingTable);
// lookup restricted to the first backend: only entry 1 may be returned/routed
Promise<Lookup2Deferred> promise2 = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                      interfaceName,
                                                                      discoveryQos,
                                                                      new String[]{ knownGbids[0] });
DiscoveryEntryWithMetaInfo[] result2 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise2,
                                                                                          "lookup failed")[0];
verify(routingTable, times(1)).put(eq(expectedEntry1.getParticipantId()),
                                   any(Address.class),
                                   eq(true),
                                   anyLong());
verify(routingTable, never()).put(eq(expectedEntry2.getParticipantId()),
                                  any(Address.class),
                                  eq(true),
                                  anyLong());
verify(routingTable, never()).incrementReferenceCount(anyString());
assertEquals(1, result2.length);
assertEquals(expectedEntry1, result2[0]);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_globalOnly_filtersLocalEntriesByGbids() throws InterruptedException {
// Two locally registered global providers: entry1 is registered for all known GBIDs,
// entry2 only for knownGbids[1]. GLOBAL_ONLY lookups must filter the local store
// results by the requested GBIDs. Reference counts accumulate across the three lookups.
String domain = "domain";
String[] domainsForLookup = new String[]{ domain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
DiscoveryEntry localEntry1 = new DiscoveryEntry(new Version(47, 11),
                                                domain,
                                                interfaceName,
                                                "participantId1",
                                                new ProviderQos(),
                                                System.currentTimeMillis(),
                                                expiryDateMs,
                                                publicKeyId);
DiscoveryEntryWithMetaInfo expectedEntry1 = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                localEntry1);
DiscoveryEntry localEntry2 = new DiscoveryEntry(localEntry1);
localEntry2.setParticipantId("participantId2");
DiscoveryEntryWithMetaInfo expectedEntry2 = new DiscoveryEntryWithMetaInfo(expectedEntry1);
expectedEntry2.setParticipantId(localEntry2.getParticipantId());
doReturn(Arrays.asList(localEntry1, localEntry2)).when(localDiscoveryEntryStoreMock)
                                                 .lookupGlobalEntries(eq(domainsForLookup), eq(interfaceName));
// register entry1 in all backends, entry2 only in knownGbids[1]
Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(localEntry1, true, knownGbids);
checkPromiseSuccess(promiseAdd, "add failed");
promiseAdd = localCapabilitiesDirectory.add(localEntry2, true, new String[]{ knownGbids[1] });
checkPromiseSuccess(promiseAdd, "add failed");
verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
verify(localDiscoveryEntryStoreMock).add(eq(localEntry1));
verify(localDiscoveryEntryStoreMock).add(eq(localEntry2));
// lookup for knownGbids[1]: both entries match
Promise<Lookup2Deferred> promiseLookup1 = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                            interfaceName,
                                                                            discoveryQos,
                                                                            new String[]{ knownGbids[1] });
DiscoveryEntryWithMetaInfo[] result1 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup1,
                                                                                          "lookup failed")[0];
assertEquals(2, result1.length);
// result order is not guaranteed; determine which index holds which entry
int actualEntry1 = expectedEntry1.getParticipantId().equals(result1[0].getParticipantId()) ? 0 : 1;
int actualEntry2 = (actualEntry1 + 1) % 2;
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry1, result1[actualEntry1]));
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry2, result1[actualEntry2]));
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedEntry1.getParticipantId()));
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedEntry2.getParticipantId()));
// lookup for knownGbids[0]: only entry1 matches
Promise<Lookup2Deferred> promiseLookup2 = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                            interfaceName,
                                                                            discoveryQos,
                                                                            new String[]{ knownGbids[0] });
DiscoveryEntryWithMetaInfo[] result2 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup2,
                                                                                          "lookup failed")[0];
assertEquals(1, result2.length);
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry1, result2[0]));
verify(routingTable, times(2)).incrementReferenceCount(eq(expectedEntry1.getParticipantId()));
// lookup for all known GBIDs: both entries match again
Promise<Lookup2Deferred> promiseLookup3 = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                            interfaceName,
                                                                            discoveryQos,
                                                                            knownGbids);
DiscoveryEntryWithMetaInfo[] result3 = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promiseLookup3,
                                                                                          "lookup failed")[0];
assertEquals(2, result3.length);
actualEntry1 = expectedEntry1.getParticipantId().equals(result3[0].getParticipantId()) ? 0 : 1;
actualEntry2 = (actualEntry1 + 1) % 2;
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry1, result3[actualEntry1]));
assertTrue(discoveryEntriesWithMetaInfoMatchWithUpdatedLastSeenDate(expectedEntry2, result3[actualEntry2]));
verify(routingTable, times(3)).incrementReferenceCount(eq(expectedEntry1.getParticipantId()));
verify(routingTable, times(2)).incrementReferenceCount(eq(expectedEntry2.getParticipantId()));
// local results never create new routing table entries
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_globalOnly_filtersLocalEntriesByGbids() throws InterruptedException {
// A provider registered only in knownGbids[1]: a participantId lookup restricted to
// knownGbids[0] must fail with NO_ENTRY_FOR_SELECTED_BACKENDS, while lookups for
// knownGbids[1] or all GBIDs return the local entry without touching the GCD client.
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
DiscoveryEntry localEntry = new DiscoveryEntry(new Version(47, 11),
                                               "domain",
                                               "interfaceName",
                                               "participantId1",
                                               new ProviderQos(),
                                               System.currentTimeMillis(),
                                               expiryDateMs,
                                               publicKeyId);
DiscoveryEntry localStoreEntry = new DiscoveryEntry(localEntry);
DiscoveryEntryWithMetaInfo expectedLocalEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                    localEntry);
// register in knownGbids[1]
Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(localEntry,
                                                                  true,
                                                                  new String[]{ knownGbids[1] });
checkPromiseSuccess(promiseAdd, "add failed");
// discard interactions caused by add() before verifying lookup behavior
reset((Object) localDiscoveryEntryStoreMock,
      (Object) globalDiscoveryEntryCacheMock,
      (Object) globalCapabilitiesDirectoryClient);
doReturn(Optional.of(localStoreEntry)).when(localDiscoveryEntryStoreMock)
                                      .lookup(eq(expectedLocalEntry.getParticipantId()), eq(Long.MAX_VALUE));
// lookup knownGbids[1], expect local entry
Promise<Lookup4Deferred> promiseLookup1 = localCapabilitiesDirectory.lookup(expectedLocalEntry.getParticipantId(),
                                                                            discoveryQos,
                                                                            new String[]{ knownGbids[1] });
DiscoveryEntryWithMetaInfo result1 = (DiscoveryEntryWithMetaInfo) checkPromiseSuccess(promiseLookup1,
                                                                                      "lookup failed")[0];
verify(localDiscoveryEntryStoreMock, times(1)).lookup(eq(expectedLocalEntry.getParticipantId()),
                                                      eq(Long.MAX_VALUE));
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
assertEquals(expectedLocalEntry, result1);
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalEntry.getParticipantId()));
// lookup knownGbids[0], expect DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS
Promise<Lookup4Deferred> promiseLookup2 = localCapabilitiesDirectory.lookup(expectedLocalEntry.getParticipantId(),
                                                                            discoveryQos,
                                                                            new String[]{ knownGbids[0] });
checkPromiseError(promiseLookup2, DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
verify(localDiscoveryEntryStoreMock, times(2)).lookup(eq(expectedLocalEntry.getParticipantId()),
                                                      eq(Long.MAX_VALUE));
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
// reference count unchanged: the failed lookup must not increment it
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalEntry.getParticipantId()));
// lookup all gbids, expect local entry
Promise<Lookup4Deferred> promiseLookup3 = localCapabilitiesDirectory.lookup(expectedLocalEntry.getParticipantId(),
                                                                            discoveryQos,
                                                                            knownGbids);
DiscoveryEntryWithMetaInfo result3 = (DiscoveryEntryWithMetaInfo) checkPromiseSuccess(promiseLookup3,
                                                                                      "lookup failed")[0];
verify(localDiscoveryEntryStoreMock, times(3)).lookup(eq(expectedLocalEntry.getParticipantId()),
                                                      eq(Long.MAX_VALUE));
verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
assertEquals(expectedLocalEntry, result3);
verify(routingTable, times(2)).incrementReferenceCount(eq(expectedLocalEntry.getParticipantId()));
// local results never create new routing table entries
verify(routingTable, never()).put(eq(expectedLocalEntry.getParticipantId()),
                                  any(Address.class),
                                  any(Boolean.class),
                                  anyLong());
}
// Shared driver: registers four providers (one per backend combination), performs a
// GLOBAL_ONLY lookup for gbidsForLookup and checks that exactly expectedParticipantIds
// are returned from the local store, with reference counting but no new routing entries
// and no call to the global capabilities directory client.
private void testLookupByDomainInterfaceWithGbids_globalOnly_allLocal(String[] gbidsForLookup,
                                                                      DiscoveryEntry entryForGbid1,
                                                                      DiscoveryEntry entryForGbid2,
                                                                      DiscoveryEntry entryForGbid3,
                                                                      DiscoveryEntry entryForGbid2And3,
                                                                      Set<String> expectedParticipantIds) throws InterruptedException {
String[] domainsForLookup = new String[]{ discoveryEntry.getDomain() };
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
final boolean awaitGlobalRegistration = true;
// register each entry for its backend(s)
Promise<Add1Deferred> promise1 = localCapabilitiesDirectory.add(entryForGbid1,
                                                                awaitGlobalRegistration,
                                                                new String[]{ knownGbids[0] });
checkPromiseSuccess(promise1, "add failed");
Promise<Add1Deferred> promise2 = localCapabilitiesDirectory.add(entryForGbid2,
                                                                awaitGlobalRegistration,
                                                                new String[]{ knownGbids[1] });
checkPromiseSuccess(promise2, "add failed");
Promise<Add1Deferred> promise3 = localCapabilitiesDirectory.add(entryForGbid3,
                                                                awaitGlobalRegistration,
                                                                new String[]{ knownGbids[2] });
checkPromiseSuccess(promise3, "add failed");
Promise<Add1Deferred> promise4 = localCapabilitiesDirectory.add(entryForGbid2And3,
                                                                awaitGlobalRegistration,
                                                                new String[]{ knownGbids[1], knownGbids[2] });
checkPromiseSuccess(promise4, "add failed");
// local store returns all four entries; global cache is empty
doReturn(Arrays.asList(entryForGbid1,
                       entryForGbid2,
                       entryForGbid3,
                       entryForGbid2And3)).when(localDiscoveryEntryStoreMock)
                                          .lookupGlobalEntries(eq(domainsForLookup), eq(INTERFACE_NAME));
doReturn(new ArrayList<GlobalDiscoveryEntry>()).when(globalDiscoveryEntryCacheMock)
                                               .lookup(eq(domainsForLookup), eq(INTERFACE_NAME), anyLong());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                           INTERFACE_NAME,
                                                                           discoveryQos,
                                                                           gbidsForLookup);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] foundEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
assertEquals(expectedParticipantIds.size(), foundEntries.length);
Set<String> foundParticipantIds = new HashSet<>();
for (DiscoveryEntryWithMetaInfo foundEntry : foundEntries) {
    foundParticipantIds.add(foundEntry.getParticipantId());
}
assertEquals(expectedParticipantIds, foundParticipantIds);
// every returned local entry is reference counted; none gets a new routing entry
expectedParticipantIds.forEach((participantId) -> {
    verify(routingTable, times(1)).incrementReferenceCount(eq(participantId));
    verify(routingTable, never()).put(eq(participantId), any(Address.class), any(Boolean.class), anyLong());
});
// reset so this helper can be called repeatedly from different tests
reset((Object) routingTable);
verify(globalCapabilitiesDirectoryClient,
       times(0)).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                        any(String[].class),
                        anyString(),
                        anyLong(),
                        any(String[].class));
}
// Shared driver: with nothing local or cached, a GLOBAL_ONLY lookup for gbidsForLookup
// must query the global capabilities directory with expectedGbids, add both returned
// remote entries to the routing table and not do any reference counting.
private void testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(String[] gbidsForLookup,
                                                                               String[] expectedGbids) throws InterruptedException {
String[] domainsForLookup = new String[]{ discoveryEntry.getDomain() };
String[] expectedDomains = domainsForLookup.clone();
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
List<GlobalDiscoveryEntry> globalEntries = new ArrayList<>();
globalEntries.add(globalDiscoveryEntry);
DiscoveryEntry entry2 = new DiscoveryEntry(discoveryEntry);
entry2.setParticipantId("participantId2");
globalEntries.add(CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(entry2, globalAddressWithoutGbid));
// empty cache forces the remote lookup
doReturn(new ArrayList<GlobalDiscoveryEntry>()).when(globalDiscoveryEntryCacheMock)
                                               .lookup(eq(expectedDomains), eq(INTERFACE_NAME), anyLong());
doAnswer(createLookupAnswer(globalEntries)).when(globalCapabilitiesDirectoryClient)
                                           .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                   eq(expectedDomains),
                                                   eq(INTERFACE_NAME),
                                                   eq(discoveryQos.getDiscoveryTimeout()),
                                                   eq(expectedGbids));
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domainsForLookup,
                                                                           INTERFACE_NAME,
                                                                           discoveryQos,
                                                                           gbidsForLookup);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                 eq(expectedDomains),
                                                 eq(INTERFACE_NAME),
                                                 eq(discoveryQos.getDiscoveryTimeout()),
                                                 eq(expectedGbids));
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] foundEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
assertEquals(2, foundEntries.length);
// remote entries: routing table entries created, no reference counting
Arrays.asList(foundEntries)
      .forEach((entry) -> verify(routingTable, times(1)).put(eq(entry.getParticipantId()),
                                                             any(Address.class),
                                                             eq(true),
                                                             anyLong()));
verify(routingTable, never()).incrementReferenceCount(anyString());
// reset so this helper can be called repeatedly from different tests
reset((Object) routingTable);
}
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_multipleGbids_allCached() throws InterruptedException {
// Providers are registered for gbid1, gbid2, gbid3 and gbid2+gbid3. A lookup restricted
// to gbid1 and gbid3 must not return the provider registered only for gbid2.
String[] selectedGbids = new String[]{ knownGbids[0], knownGbids[2] };
DiscoveryEntry gbid1Entry = new DiscoveryEntry(discoveryEntry);
DiscoveryEntry gbid2Entry = new DiscoveryEntry(discoveryEntry);
gbid2Entry.setParticipantId("participantId2");
DiscoveryEntry gbid3Entry = new DiscoveryEntry(discoveryEntry);
gbid3Entry.setParticipantId("participantId3");
DiscoveryEntry gbid2And3Entry = new DiscoveryEntry(discoveryEntry);
gbid2And3Entry.setParticipantId("participantId4");
Set<String> expectedParticipantIds = new HashSet<>(Arrays.asList(gbid1Entry.getParticipantId(),
                                                                 gbid3Entry.getParticipantId(),
                                                                 gbid2And3Entry.getParticipantId()));
testLookupByDomainInterfaceWithGbids_globalOnly_allLocal(selectedGbids,
                                                         gbid1Entry,
                                                         gbid2Entry,
                                                         gbid3Entry,
                                                         gbid2And3Entry,
                                                         expectedParticipantIds);
}
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_emptyGbidsArray_allCached() throws InterruptedException {
// An empty GBID selection means "all known backends", so every registered provider is expected.
String[] selectedGbids = new String[0];
DiscoveryEntry gbid1Entry = new DiscoveryEntry(discoveryEntry);
DiscoveryEntry gbid2Entry = new DiscoveryEntry(discoveryEntry);
gbid2Entry.setParticipantId("participantId2");
DiscoveryEntry gbid3Entry = new DiscoveryEntry(discoveryEntry);
gbid3Entry.setParticipantId("participantId3");
DiscoveryEntry gbid2And3Entry = new DiscoveryEntry(discoveryEntry);
gbid2And3Entry.setParticipantId("participantId4");
Set<String> expectedParticipantIds = new HashSet<>(Arrays.asList(gbid1Entry.getParticipantId(),
                                                                 gbid2Entry.getParticipantId(),
                                                                 gbid3Entry.getParticipantId(),
                                                                 gbid2And3Entry.getParticipantId()));
testLookupByDomainInterfaceWithGbids_globalOnly_allLocal(selectedGbids,
                                                         gbid1Entry,
                                                         gbid2Entry,
                                                         gbid3Entry,
                                                         gbid2And3Entry,
                                                         expectedParticipantIds);
}
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_multipleGbids_noneCached() throws InterruptedException {
// Exercise both orderings of the selected GBIDs; the GCD lookup must receive them verbatim.
String[] firstOrder = new String[]{ knownGbids[0], knownGbids[2] };
testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(firstOrder, firstOrder.clone());
String[] reversedOrder = new String[]{ knownGbids[2], knownGbids[0] };
testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(reversedOrder, reversedOrder.clone());
}
@Test
public void testLookupByDomainInterfaceWithGbids_globalOnly_emptyGbidsArray_noneCached() throws InterruptedException {
// An empty GBID selection must be expanded to all known backends for the GCD lookup.
testLookupByDomainInterfaceWithGbids_globalOnly_noneLocalOrCached(new String[0], knownGbids);
}
// Shared driver: with the global entry present in the cache, a GLOBAL_ONLY participantId
// lookup must be answered from the cache (routing entry created, no reference counting)
// without contacting the global capabilities directory client.
private void testLookupByParticipantIdWithGbids_globalOnly_allCached(String[] gbidsForLookup) throws InterruptedException {
String participantId = discoveryEntry.getParticipantId();
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
doReturn(Optional.of(globalDiscoveryEntry)).when(globalDiscoveryEntryCacheMock).lookup(eq(participantId),
                                                                                       anyLong());
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
                                                                           discoveryQos,
                                                                           gbidsForLookup);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo foundEntry = (DiscoveryEntryWithMetaInfo) values[0];
assertEquals(participantId, foundEntry.getParticipantId());
// cached (remote) entry: routing entry created, no reference counting
verify(routingTable, never()).incrementReferenceCount(eq(globalDiscoveryEntry.getParticipantId()));
verify(routingTable, times(1)).put(eq(globalDiscoveryEntry.getParticipantId()),
                                   any(Address.class),
                                   eq(true),
                                   anyLong());
// reset so this helper can be called repeatedly from different tests
reset((Object) routingTable);
verify(globalCapabilitiesDirectoryClient,
       times(0)).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                        anyString(),
                        anyLong(),
                        any(String[].class));
}
// Shared driver: with an empty cache, a GLOBAL_ONLY participantId lookup for
// gbidsForLookup must query the global capabilities directory with expectedGbids,
// add the returned remote entry to the routing table and not reference count it.
private void testLookupByParticipantIdWithGbids_globalOnly_noneCached(String[] gbidsForLookup,
                                                                      String[] expectedGbids) throws InterruptedException {
String participantId = discoveryEntry.getParticipantId();
DiscoveryQos discoveryQos = new DiscoveryQos(30000L, 500L, DiscoveryScope.GLOBAL_ONLY, false);
doReturn(Optional.empty()).when(globalDiscoveryEntryCacheMock).lookup(eq(participantId), anyLong());
doAnswer(createLookupAnswer(globalDiscoveryEntry)).when(globalCapabilitiesDirectoryClient)
                                                  .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                                          eq(participantId),
                                                          eq(discoveryQos.getDiscoveryTimeout()),
                                                          eq(expectedGbids));
Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
                                                                           discoveryQos,
                                                                           gbidsForLookup);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                                                 eq(participantId),
                                                 eq(discoveryQos.getDiscoveryTimeout()),
                                                 eq(expectedGbids));
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo foundEntry = (DiscoveryEntryWithMetaInfo) values[0];
assertEquals(participantId, foundEntry.getParticipantId());
// remote entry: routing entry created, no reference counting
verify(routingTable, times(1)).put(eq(foundEntry.getParticipantId()), any(Address.class), eq(true), anyLong());
verify(routingTable, never()).incrementReferenceCount(anyString());
// reset so this helper can be called repeatedly from different tests
reset((Object) routingTable);
}
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_multipleGbids_allCached() throws InterruptedException {
// The cached global entry must be returned for a lookup over a subset of the known GBIDs.
testLookupByParticipantIdWithGbids_globalOnly_allCached(new String[]{ knownGbids[0], knownGbids[2] });
}
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_emptyGbidsArray_allCached() throws InterruptedException {
// An empty GBID selection (meaning all known backends) must also be served from the cache.
testLookupByParticipantIdWithGbids_globalOnly_allCached(new String[0]);
}
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_multipleGbids_noneCached() throws InterruptedException {
// Exercise both orderings of the selected GBIDs; the GCD lookup must receive them verbatim.
String[] firstOrder = new String[]{ knownGbids[0], knownGbids[2] };
testLookupByParticipantIdWithGbids_globalOnly_noneCached(firstOrder, firstOrder.clone());
String[] reversedOrder = new String[]{ knownGbids[2], knownGbids[0] };
testLookupByParticipantIdWithGbids_globalOnly_noneCached(reversedOrder, reversedOrder.clone());
}
@Test
public void testLookupByParticipantIdWithGbids_globalOnly_emptyGbidsArray_noneCached() throws InterruptedException {
// An empty GBID selection must be expanded to all known backends for the GCD lookup.
testLookupByParticipantIdWithGbids_globalOnly_noneCached(new String[0], knownGbids);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_localOnly() throws InterruptedException {
// LOCAL_ONLY lookup over two domains: both locally stored entries are returned, each
// gets its routing table reference count incremented and no new routing entry is added.
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_ONLY);
// Fix: the last constructor argument is the public key id; the original accidentally
// passed interfaceName there (copy-paste slip, inconsistent with all other entry
// constructions in this test class). The value is not asserted, but use publicKeyId.
Collection<DiscoveryEntry> entries = Arrays.asList(new DiscoveryEntry(new Version(0, 0),
                                                                      "domain1",
                                                                      interfaceName,
                                                                      "participantId1",
                                                                      new ProviderQos(),
                                                                      System.currentTimeMillis(),
                                                                      expiryDateMs,
                                                                      publicKeyId),
                                                   new DiscoveryEntry(new Version(0, 0),
                                                                      "domain2",
                                                                      interfaceName,
                                                                      "participantId2",
                                                                      new ProviderQos(),
                                                                      System.currentTimeMillis(),
                                                                      expiryDateMs,
                                                                      publicKeyId));
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(entries);
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
Object[] values = checkPromiseSuccess(promise, "lookup failed");
assertEquals(2, ((DiscoveryEntryWithMetaInfo[]) values[0]).length);
entries.forEach((entry) -> {
    verify(routingTable, times(1)).incrementReferenceCount(eq(entry.getParticipantId()));
    verify(routingTable, never()).put(eq(entry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
});
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_noneCached() throws InterruptedException {
// GLOBAL_ONLY over two domains with empty cache and an empty GCD result:
// the promise resolves with zero entries and the routing table stays untouched.
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
                                          eq(interfaceName),
                                          eq(discoveryQos.getCacheMaxAge()))).thenReturn(new ArrayList<GlobalDiscoveryEntry>());
// domain order passed to the GCD client is unspecified, hence the any-order matcher
doAnswer(createLookupAnswer(new ArrayList<GlobalDiscoveryEntry>())).when(globalCapabilitiesDirectoryClient)
                                                                   .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                                           org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                                           eq(interfaceName),
                                                                           eq(discoveryQos.getDiscoveryTimeout()),
                                                                           eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(1,
                                     domains,
                                     interfaceName,
                                     discoveryQos.getDiscoveryTimeout(),
                                     knownGbids,
                                     promise,
                                     0);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_allCached() throws InterruptedException {
// GLOBAL_ONLY over two domains where both entries are already cached:
// no GCD call is made; each cached (remote) entry gets a routing table entry
// and no reference counting.
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
List<GlobalDiscoveryEntry> entries = new ArrayList<>();
for (String domain : domains) {
    GlobalDiscoveryEntry entry = new GlobalDiscoveryEntry();
    entry.setParticipantId("participantIdFor-" + domain);
    entry.setDomain(domain);
    entry.setAddress(globalAddress1Serialized);
    entries.add(entry);
}
when(globalDiscoveryEntryCacheMock.lookup(eq(domains),
                                          eq(interfaceName),
                                          eq(discoveryQos.getCacheMaxAge()))).thenReturn(entries);
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(0,
                                     domains,
                                     interfaceName,
                                     discoveryQos.getDiscoveryTimeout(),
                                     knownGbids,
                                     promise,
                                     2); // 2 cached entries
entries.forEach((entry) -> {
    verify(routingTable, never()).incrementReferenceCount(eq(entry.getParticipantId()));
    verify(routingTable, times(1)).put(eq(entry.getParticipantId()), any(Address.class), eq(true), anyLong());
});
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_allLocalGlobal() throws InterruptedException {
// GLOBAL_ONLY over two domains where both providers are registered locally (as global
// providers): the lookup is served from the local store, reference counts are
// incremented and no new routing table entries are created.
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interface1";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
List<DiscoveryEntry> entries = new ArrayList<>();
List<Promise<Add1Deferred>> promises = new ArrayList<>();
for (String domain : domains) {
    DiscoveryEntry entry = new GlobalDiscoveryEntry();
    entry.setParticipantId("participantIdFor-" + domain);
    entry.setDomain(domain);
    entries.add(entry);
    promises.add(localCapabilitiesDirectory.add(entry, true, knownGbids));
}
// wait until all registrations completed before looking up
promises.forEach(promise -> {
    try {
        checkPromiseSuccess(promise, "addFailed");
    } catch (InterruptedException e) {
        fail("add failed: " + e);
    }
});
doReturn(entries).when(localDiscoveryEntryStoreMock).lookupGlobalEntries(eq(domains), eq(interfaceName));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verifyGcdLookupAndPromiseFulfillment(0,
                                     domains,
                                     interfaceName,
                                     discoveryQos.getDiscoveryTimeout(),
                                     knownGbids,
                                     promise,
                                     2); // 2 cached entries
entries.forEach((entry) -> {
    verify(routingTable, times(1)).incrementReferenceCount(eq(entry.getParticipantId()));
    verify(routingTable, never()).put(eq(entry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
});
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_globalOnly_oneCached() throws InterruptedException {
    // GLOBAL_ONLY lookup over two domains where only domain1 has a non-expired cached
    // global entry. The GCD is still queried once (for the uncovered domain) and returns
    // no additional entries, so the single cached entry is the only result.
    String[] domains = new String[]{ "domain1", "domain2" };
    String interfaceName = "interface1";
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
    GlobalDiscoveryEntry entry = new GlobalDiscoveryEntry();
    entry.setParticipantId("participantId1");
    entry.setInterfaceName(interfaceName);
    entry.setDomain(domains[0]);
    entry.setAddress(globalAddress1Serialized);
    // Cache hit for the domain lookup as well as for the participantId lookup.
    doReturn(Arrays.asList(entry)).when(globalDiscoveryEntryCacheMock)
                                  .lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAge()));
    doReturn(Optional.of(entry)).when(globalDiscoveryEntryCacheMock).lookup(eq(entry.getParticipantId()),
                                                                            eq(Long.MAX_VALUE));
    // GCD lookup succeeds but yields no entries.
    doAnswer(createLookupAnswer(new ArrayList<GlobalDiscoveryEntry>())).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                    eq(domains),
                    eq(interfaceName),
                    eq(discoveryQos.getDiscoveryTimeout()),
                    eq(knownGbids));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    // Expect exactly 1 GCD lookup and exactly 1 resulting entry (the cached one).
    verifyGcdLookupAndPromiseFulfillment(1,
                                         domains,
                                         interfaceName,
                                         discoveryQos.getDiscoveryTimeout(),
                                         knownGbids,
                                         promise,
                                         1);
    // Cached (non-local) entries do not touch the reference count; they are added
    // to the routing table as globally visible.
    verify(routingTable, never()).incrementReferenceCount(anyString());
    verify(routingTable, times(1)).put(eq(entry.getParticipantId()), any(Address.class), eq(true), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupMultipleDomains_localThenGlobal_oneLocalGlobalOneCached_sameParticipantIdsRemote() throws InterruptedException {
    // LOCAL_THEN_GLOBAL lookup over three domains: one covered locally, one covered by
    // the global cache and one only known remotely. The GCD additionally returns
    // duplicates of the local and cached providers (same participantIds, remoteDomain):
    // those duplicates must be discarded in favor of the local/cached entries, while
    // the genuinely new remote entries are cached and added to the routing table.
    String localDomain = "localDomain";
    String cachedDomain = "cachedDomain";
    String remoteDomain = "remoteDomain";
    String[] domains = new String[]{ localDomain, cachedDomain, remoteDomain };
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
    // local entry for participantId1 and domain1
    discoveryEntry.setParticipantId("participantId1");
    discoveryEntry.setDomain(localDomain);
    doReturn(Arrays.asList(discoveryEntry)).when(localDiscoveryEntryStoreMock)
            .lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                    eq(INTERFACE_NAME));
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
            .lookup(eq(discoveryEntry.getParticipantId()), anyLong());
    // cached entry for participantId2 for cachedDomain
    GlobalDiscoveryEntry cachedRemoteEntry = new GlobalDiscoveryEntry();
    cachedRemoteEntry.setParticipantId("participantId2");
    cachedRemoteEntry.setInterfaceName(INTERFACE_NAME);
    cachedRemoteEntry.setDomain(cachedDomain);
    cachedRemoteEntry.setAddress(globalAddress1Serialized);
    doReturn(Arrays.asList(cachedRemoteEntry)).when(globalDiscoveryEntryCacheMock)
            .lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                    eq(INTERFACE_NAME),
                    eq(discoveryQos.getCacheMaxAge()));
    // remote entries for local provider and for remoteDomain for participantIds 2 and 3
    GlobalDiscoveryEntry remoteEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry,
                                                                                            globalAddressWithoutGbid);
    remoteEntry1.setDomain(remoteDomain);
    GlobalDiscoveryEntry remoteEntry2 = new GlobalDiscoveryEntry(cachedRemoteEntry);
    remoteEntry2.setDomain(remoteDomain);
    remoteEntry2.setAddress(globalAddressWithoutGbidSerialized);
    GlobalDiscoveryEntry remoteEntry3 = new GlobalDiscoveryEntry(cachedRemoteEntry);
    remoteEntry3.setParticipantId("participantId3");
    remoteEntry3.setDomain(remoteDomain);
    remoteEntry3.setAddress(globalAddressWithoutGbidSerialized);
    doAnswer(createLookupAnswer(Arrays.asList(remoteEntry1,
                                              remoteEntry2,
                                              remoteEntry3))).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                    eq(domains),
                    eq(INTERFACE_NAME),
                    eq(discoveryQos.getDiscoveryTimeout()),
                    eq(knownGbids));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, INTERFACE_NAME, discoveryQos);
    verify(localDiscoveryEntryStoreMock).lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                eq(INTERFACE_NAME));
    verify(globalDiscoveryEntryCacheMock).lookup(org.mockito.hamcrest.MockitoHamcrest.argThat(org.hamcrest.Matchers.arrayContainingInAnyOrder(domains)),
                                                 eq(INTERFACE_NAME),
                                                 eq(discoveryQos.getCacheMaxAge()));
    verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                     eq(domains),
                                                     eq(INTERFACE_NAME),
                                                     eq(discoveryQos.getDiscoveryTimeout()),
                                                     eq(knownGbids));
    Object[] values = verifyGcdLookupAndPromiseFulfillment(1,
                                                           domains,
                                                           INTERFACE_NAME,
                                                           discoveryQos.getDiscoveryTimeout(),
                                                           knownGbids,
                                                           promise,
                                                           3);
    DiscoveryEntryWithMetaInfo[] result = (DiscoveryEntryWithMetaInfo[]) values[0];
    assertEquals(3, result.length);
    boolean discoveryEntryFound = false;
    boolean remoteEntry2Found = false;
    boolean remoteEntry3Found = false;
    // Use equals() for the participantId comparison; == would only work for identical
    // String references and could yield false negatives for equal but distinct instances.
    for (DiscoveryEntryWithMetaInfo entry : result) {
        if (discoveryEntry.getParticipantId().equals(entry.getParticipantId()) && entry.getDomain().equals(localDomain)
                && entry.getIsLocal()) {
            discoveryEntryFound = true;
        }
        if (remoteEntry2.getParticipantId().equals(entry.getParticipantId()) && entry.getDomain().equals(remoteDomain)
                && !entry.getIsLocal()) {
            remoteEntry2Found = true;
        }
        if (remoteEntry3.getParticipantId().equals(entry.getParticipantId()) && entry.getDomain().equals(remoteDomain)
                && !entry.getIsLocal()) {
            remoteEntry3Found = true;
        }
    }
    // The duplicate of the local provider is neither cached nor routed again, and
    // only its local reference count is incremented.
    verify(globalDiscoveryEntryCacheMock, never()).add(remoteEntry1);
    verify(globalDiscoveryEntryCacheMock).add(remoteEntry2);
    verify(globalDiscoveryEntryCacheMock).add(remoteEntry3);
    verify(routingTable, never()).put(eq(remoteEntry1.getParticipantId()),
                                      eq(globalAddressWithoutGbid),
                                      eq(true),
                                      anyLong());
    verify(routingTable, times(1)).incrementReferenceCount(eq(remoteEntry1.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(remoteEntry2.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(remoteEntry3.getParticipantId()));
    verify(routingTable).put(eq(remoteEntry2.getParticipantId()),
                             eq(globalAddressWithoutGbid),
                             eq(true),
                             anyLong());
    verify(routingTable).put(eq(remoteEntry3.getParticipantId()),
                             eq(globalAddressWithoutGbid),
                             eq(true),
                             anyLong());
    assertTrue(discoveryEntryFound && remoteEntry2Found && remoteEntry3Found);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceGbids_localAndGlobal_localGlobalEntry_invokesGcd_filtersCombinedResult() throws Exception {
    // LOCAL_AND_GLOBAL lookup where the same provider is found locally AND via the GCD:
    // the GCD is still invoked, but the combined result must contain the provider only
    // once, flagged as local.
    String[] domains = { discoveryEntry.getDomain() };
    List<DiscoveryEntry> localDiscoveryEntries = Arrays.asList(discoveryEntry);
    List<GlobalDiscoveryEntry> globalDiscoveryEntries = Arrays.asList(globalDiscoveryEntry);
    final long cacheMaxAge = 10000L;
    final long discoveryTimeout = 5000L;
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge,
                                                 discoveryTimeout,
                                                 DiscoveryScope.LOCAL_AND_GLOBAL,
                                                 false);
    when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(INTERFACE_NAME))).thenReturn(localDiscoveryEntries);
    doAnswer(createLookupAnswer(globalDiscoveryEntries)).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                    Mockito.<String[]> any(),
                    anyString(),
                    anyLong(),
                    any());
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
            .lookup(eq(discoveryEntry.getParticipantId()), anyLong());
    Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
                                                                               INTERFACE_NAME,
                                                                               discoveryQos,
                                                                               new String[0]);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
    // Duplicate filtered: exactly one entry, and the local one wins.
    assertEquals(1, capturedDiscoveryEntries.length);
    assertTrue(capturedDiscoveryEntries[0].getIsLocal());
    verify(globalCapabilitiesDirectoryClient).lookup(any(),
                                                     eq(domains),
                                                     eq(INTERFACE_NAME),
                                                     eq(discoveryTimeout),
                                                     any());
    // The global duplicate of the local provider is neither cached nor routed;
    // only the local reference count is incremented.
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(routingTable, times(1)).incrementReferenceCount(eq(capturedDiscoveryEntries[0].getParticipantId()));
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
    verify(localDiscoveryEntryStoreMock).lookup(eq(domains), eq(INTERFACE_NAME));
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry() throws Exception {
    // test assumes that the local entry is registered after the global lookup has been triggered
    // GLOBAL_ONLY lookup: the GCD result contains a provider that turns out to be
    // registered locally as well (the participantId store lookup returns it); the local
    // entry must be returned instead of the global duplicate.
    String[] domains = { discoveryEntry.getDomain() };
    List<GlobalDiscoveryEntry> globalDiscoveryEntries = Arrays.asList(globalDiscoveryEntry);
    final long cacheMaxAge = 10000L;
    final long discoveryTimeout = 5000L;
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
    doAnswer(createLookupAnswer(globalDiscoveryEntries)).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                    Mockito.<String[]> any(),
                    anyString(),
                    anyLong(),
                    any());
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
            .lookup(eq(discoveryEntry.getParticipantId()), anyLong());
    Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
                                                                               INTERFACE_NAME,
                                                                               discoveryQos,
                                                                               new String[0]);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
    assertEquals(1, capturedDiscoveryEntries.length);
    // The returned entry equals the local one converted with isLocal == true.
    assertEquals(CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true, discoveryEntry),
                 capturedDiscoveryEntries[0]);
    verify(globalCapabilitiesDirectoryClient).lookup(any(),
                                                     eq(domains),
                                                     eq(INTERFACE_NAME),
                                                     eq(discoveryTimeout),
                                                     any());
    // Duplicate handling: no cache add, no routing table put, only reference counting.
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(routingTable, times(1)).incrementReferenceCount(eq(capturedDiscoveryEntries[0].getParticipantId()));
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
    verify(localDiscoveryEntryStoreMock).lookup(eq(discoveryEntry.getParticipantId()), anyLong());
}
@Test
public void lookupDomIntf_globalOnlyWithCache_localGlobalEntryNoCachedEntry_doesNotInvokeGcd() throws Exception {
    // GLOBAL_ONLY lookup with a positive cacheMaxAge: a provider that is registered
    // locally with global visibility satisfies the query, so the GCD must not be called.
    final long cacheMaxAge = 1L;
    final long discoveryTimeout = 5000L;
    final String[] domains = new String[]{ discoveryEntry.getDomain() };
    final String interfaceName = discoveryEntry.getInterfaceName();
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
    // register in all gbids
    Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
    checkPromiseSuccess(promiseAdd, "add failed");
    // Drop the interactions recorded during add(); only the lookup phase is verified below.
    reset(localDiscoveryEntryStoreMock, globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    doReturn(new HashSet<>(Arrays.asList(discoveryEntry))).when(localDiscoveryEntryStoreMock)
            .lookupGlobalEntries(eq(domains), eq(interfaceName));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    DiscoveryEntryWithMetaInfo[] values = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise,
                                                                                             "lookup failed")[0];
    assertEquals(1, values.length);
    // assertTrue instead of assertEquals(true, ...) — clearer intent, same check.
    assertTrue(values[0].getIsLocal());
    // Local provider: reference counted, no new routing table entry, no GCD interaction.
    verify(routingTable, times(1)).incrementReferenceCount(eq(discoveryEntry.getParticipantId()));
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
}
@Test
public void lookupDomIntf_globalOnlyNoCache_localGlobalEntryNoCachedEntry_invokesGcd() throws Exception {
    // GLOBAL_ONLY lookup with cacheMaxAge == 0: even though a provider was registered
    // locally with global visibility, the zero cache age forces a fresh GCD lookup.
    // After reset() the store mocks return defaults, so the GCD's empty answer yields
    // an empty result.
    final long cacheMaxAge = 0L;
    final long discoveryTimeout = 5000L;
    final String[] domains = new String[]{ discoveryEntry.getDomain() };
    final String interfaceName = discoveryEntry.getInterfaceName();
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
    // register in all gbids
    Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
    checkPromiseSuccess(promiseAdd, "add failed");
    // Discard the interactions recorded during add(); only the lookup phase is verified.
    reset(localDiscoveryEntryStoreMock, globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    doAnswer(createLookupAnswer(new ArrayList<GlobalDiscoveryEntry>())).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                    Mockito.<String[]> any(),
                    anyString(),
                    anyLong(),
                    any());
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    DiscoveryEntryWithMetaInfo[] values = (DiscoveryEntryWithMetaInfo[]) checkPromiseSuccess(promise,
                                                                                             "lookup failed")[0];
    assertEquals(0, values.length);
    // No results means no routing table updates of any kind.
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
    verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                     eq(domains),
                                                     eq(interfaceName),
                                                     eq(discoveryQos.getDiscoveryTimeout()),
                                                     eq(knownGbids));
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdGbids_globalOnlyWithCache_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry() throws Exception {
    // Cache-enabled variant of the shared scenario (local entry appears only after the
    // global lookup was triggered): a positive cacheMaxAge must not suppress the GCD call.
    final long cacheMaxAge = 10000L;
    lookupByParticipantIdGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry(cacheMaxAge);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdGbids_globalOnlyNoCache_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry() throws Exception {
    // Cache-disabled variant of the shared scenario (local entry appears only after the
    // global lookup was triggered): cacheMaxAge == 0 always triggers the GCD call.
    final long cacheMaxAge = 0L;
    lookupByParticipantIdGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry(cacheMaxAge);
}
/**
 * Shared scenario for the GLOBAL_ONLY lookup-by-participantId tests: the GCD lookup
 * is triggered first, and the provider becomes available locally only afterwards
 * (simulated by a stateful store answer). The returned entry must then be the local
 * one, the global duplicate is ignored.
 *
 * @param cacheMaxAge cacheMaxAge used in the DiscoveryQos (0 = no cache)
 */
private void lookupByParticipantIdGbids_globalOnly_invokesGcd_ignoresGlobalDuplicateOfLocalGlobalEntry(long cacheMaxAge) throws Exception {
    // test assumes that the local entry is registered after the global lookup has been triggered
    final long discoveryTimeout = 5000L;
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, DiscoveryScope.GLOBAL_ONLY, false);
    doAnswer(createLookupAnswer(globalDiscoveryEntry)).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                    anyString(),
                    anyLong(),
                    any());
    doAnswer(new Answer<Optional<DiscoveryEntry>>() {
        // simulate provider registration after remote lookup has been triggered:
        // first store lookup misses, every subsequent one hits.
        boolean firstCall = true;

        @Override
        public Optional<DiscoveryEntry> answer(InvocationOnMock invocation) throws Throwable {
            if (firstCall) {
                firstCall = false;
                return Optional.empty();
            }
            return Optional.of(discoveryEntry);
        }
    }).when(localDiscoveryEntryStoreMock).lookup(eq(discoveryEntry.getParticipantId()), anyLong());
    Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(discoveryEntry.getParticipantId(),
                                                                               discoveryQos,
                                                                               new String[0]);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedDiscoveryEntry = (DiscoveryEntryWithMetaInfo) values[0];
    // The locally registered provider wins over its global duplicate.
    assertTrue(capturedDiscoveryEntry.getIsLocal());
    verify(globalCapabilitiesDirectoryClient).lookup(any(),
                                                     eq(discoveryEntry.getParticipantId()),
                                                     eq(discoveryTimeout),
                                                     any());
    // Two store lookups: the miss before the GCD call and the hit afterwards.
    verify(localDiscoveryEntryStoreMock, times(2)).lookup(eq(discoveryEntry.getParticipantId()), anyLong());
    // Duplicate handling: no cache add, no routing table put, only reference counting.
    verify(globalDiscoveryEntryCacheMock, never()).add(any(GlobalDiscoveryEntry.class));
    verify(routingTable, times(1)).incrementReferenceCount(eq(discoveryEntry.getParticipantId()));
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupMultipleDomains_localThenGlobal() throws InterruptedException {
    // LOCAL_THEN_GLOBAL lookup over three domains covered by three different sources:
    // domain1 locally, domain2 by the global cache, domain3 via a remote GCD lookup.
    // All three entries must appear in the result.
    String[] domains = new String[]{ "domain1", "domain2", "domain3" };
    String interfaceName = "interface1";
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
    DiscoveryEntry localEntry = new DiscoveryEntry();
    localEntry.setParticipantId("participantIdLocal");
    localEntry.setDomain(domains[0]);
    when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Arrays.asList(localEntry));
    GlobalDiscoveryEntry globalEntry = new GlobalDiscoveryEntry();
    globalEntry.setParticipantId("participantIdCached");
    globalEntry.setInterfaceName(interfaceName);
    globalEntry.setDomain(domains[1]);
    globalEntry.setAddress(globalAddress1Serialized);
    doReturn(Arrays.asList(globalEntry)).when(globalDiscoveryEntryCacheMock)
            .lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAge()));
    doReturn(Optional.of(globalEntry)).when(globalDiscoveryEntryCacheMock)
            .lookup(eq(globalEntry.getParticipantId()), eq(Long.MAX_VALUE));
    final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
                                                                            domains[2],
                                                                            interfaceName,
                                                                            "participantIdRemote",
                                                                            new ProviderQos(),
                                                                            System.currentTimeMillis(),
                                                                            System.currentTimeMillis() + 10000L,
                                                                            "publicKeyId",
                                                                            globalAddress1Serialized);
    doAnswer(createLookupAnswer(Arrays.asList(remoteGlobalEntry))).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                    Mockito.<String[]> any(),
                    anyString(),
                    anyLong(),
                    eq(knownGbids));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
                                                     eq(domains),
                                                     eq(interfaceName),
                                                     eq(discoveryQos.getDiscoveryTimeout()),
                                                     eq(knownGbids));
    Object[] values = checkPromiseSuccess(promise, "lookup failed");
    Collection<DiscoveryEntry> captured = CapabilityUtils.convertToDiscoveryEntrySet(Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]));
    assertNotNull(captured);
    assertEquals(3, captured.size());
    assertTrue(captured.contains(localEntry));
    assertTrue(captured.contains(new DiscoveryEntry(globalEntry)));
    assertTrue(captured.contains(new DiscoveryEntry(remoteGlobalEntry)));
    // Routing table handling differs per origin: the local entry is reference counted,
    // cached and remote entries are added as globally visible routing entries.
    verify(routingTable, times(1)).incrementReferenceCount(eq(localEntry.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(globalEntry.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(remoteGlobalEntry.getParticipantId()));
    verify(routingTable, never()).put(eq(localEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
    verify(routingTable, times(1)).put(eq(globalEntry.getParticipantId()), any(Address.class), eq(true), anyLong());
    verify(routingTable, times(1)).put(eq(remoteGlobalEntry.getParticipantId()),
                                       any(Address.class),
                                       eq(true),
                                       anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupMultipleDomains_localThenGlobal_oneLocalAllCachedDomains_returnsLocalAndCachedEntries() throws InterruptedException {
    // LOCAL_THEN_GLOBAL lookup where every queried domain is covered by the global
    // cache (and domain1 additionally by a local provider): no GCD call is needed and
    // the result combines the local entry with both cached entries.
    String[] domains = new String[]{ "domain1", "domain2" };
    String interfaceName = "interface1";
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
    DiscoveryEntry localEntry = new DiscoveryEntry();
    localEntry.setParticipantId("participantIdLocal");
    localEntry.setDomain(domains[0]);
    when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Arrays.asList(localEntry));
    GlobalDiscoveryEntry globalCachedEntry1 = new GlobalDiscoveryEntry();
    globalCachedEntry1.setParticipantId("participantIdCached1");
    globalCachedEntry1.setInterfaceName(interfaceName);
    globalCachedEntry1.setDomain(domains[0]);
    globalCachedEntry1.setAddress(globalAddress1Serialized);
    GlobalDiscoveryEntry globalCachedEntry2 = new GlobalDiscoveryEntry();
    globalCachedEntry2.setParticipantId("participantIdCached2");
    globalCachedEntry2.setInterfaceName(interfaceName);
    globalCachedEntry2.setDomain(domains[1]);
    globalCachedEntry2.setAddress(globalAddress1Serialized);
    Set<GlobalDiscoveryEntry> globalCachedEntries = new HashSet<GlobalDiscoveryEntry>(Arrays.asList(globalCachedEntry1,
                                                                                                    globalCachedEntry2));
    doReturn(globalCachedEntries).when(globalDiscoveryEntryCacheMock)
            .lookup(eq(domains), eq(interfaceName), eq(discoveryQos.getCacheMaxAge()));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    Object[] values = checkPromiseSuccess(promise, "lookup failed");
    verify(localDiscoveryEntryStoreMock).lookup(eq(domains), eq(interfaceName));
    verify(globalDiscoveryEntryCacheMock).lookup(eq(domains), eq(interfaceName), eq(ONE_DAY_IN_MS));
    // All domains satisfied locally/from cache -> no GCD interaction.
    verify(globalCapabilitiesDirectoryClient, never()).lookup(any(), anyString(), anyLong(), any());
    Collection<DiscoveryEntry> captured = CapabilityUtils.convertToDiscoveryEntrySet(Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]));
    assertEquals(3, captured.size());
    assertTrue(captured.contains(localEntry));
    assertTrue(captured.contains(new DiscoveryEntry(globalCachedEntry1)));
    assertTrue(captured.contains(new DiscoveryEntry(globalCachedEntry2)));
    // Local entry: reference counted only; cached entries: routing table put as global.
    verify(routingTable, times(1)).incrementReferenceCount(eq(localEntry.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(globalCachedEntry1.getParticipantId()));
    verify(routingTable, never()).incrementReferenceCount(eq(globalCachedEntry2.getParticipantId()));
    verify(routingTable, never()).put(eq(localEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
    verify(routingTable, times(1)).put(eq(globalCachedEntry1.getParticipantId()),
                                       any(Address.class),
                                       eq(true),
                                       anyLong());
    verify(routingTable, times(1)).put(eq(globalCachedEntry2.getParticipantId()),
                                       any(Address.class),
                                       eq(true),
                                       anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupMultipleDomains_localThenGlobal_allDomainsLocal_returnsOnlyLocalEntries() throws InterruptedException {
    // LOCAL_THEN_GLOBAL lookup where every queried domain has a local provider:
    // only the local entries are returned; a cached global entry for domain1 is
    // ignored and no GCD call is made.
    String[] domains = new String[]{ "domain1", "domain2" };
    String interfaceName = "interface1";
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    discoveryQos.setCacheMaxAge(ONE_DAY_IN_MS);
    DiscoveryEntry localEntry1 = new DiscoveryEntry();
    localEntry1.setParticipantId("participantIdLocal1");
    localEntry1.setDomain(domains[0]);
    DiscoveryEntry localEntry2 = new DiscoveryEntry();
    localEntry2.setParticipantId("participantIdLocal2");
    localEntry2.setDomain(domains[1]);
    when(localDiscoveryEntryStoreMock.lookup(eq(domains),
                                             eq(interfaceName))).thenReturn(Arrays.asList(localEntry1,
                                                                                          localEntry2));
    GlobalDiscoveryEntry globalCachedEntry = new GlobalDiscoveryEntry();
    globalCachedEntry.setParticipantId("participantIdCached1");
    globalCachedEntry.setInterfaceName(interfaceName);
    globalCachedEntry.setDomain(domains[0]);
    globalCachedEntry.setAddress(globalAddress1Serialized);
    doReturn(Arrays.asList(globalCachedEntry)).when(globalDiscoveryEntryCacheMock)
            .lookup(eq(domains),
                    eq(interfaceName),
                    eq(discoveryQos.getCacheMaxAge()));
    Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
    Object[] values = checkPromiseSuccess(promise, "lookup failed");
    verify(localDiscoveryEntryStoreMock).lookup(eq(domains), eq(interfaceName));
    verify(globalDiscoveryEntryCacheMock).lookup(eq(domains), eq(interfaceName), eq(ONE_DAY_IN_MS));
    // All domains satisfied locally -> no GCD interaction.
    verify(globalCapabilitiesDirectoryClient, never()).lookup(any(), anyString(), anyLong(), any());
    Collection<DiscoveryEntry> captured = CapabilityUtils.convertToDiscoveryEntrySet(Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]));
    assertEquals(2, captured.size());
    assertTrue(captured.contains(localEntry1));
    assertTrue(captured.contains(localEntry2));
    // The cached entry must NOT appear since its domain was already covered locally.
    assertFalse(captured.contains(new DiscoveryEntry(globalCachedEntry)));
    captured.forEach((entry) -> {
        // Local results: reference counted, never added as routing table entries.
        verify(routingTable, times(1)).incrementReferenceCount(eq(entry.getParticipantId()));
        verify(routingTable, never()).put(eq(entry.getParticipantId()),
                                          any(Address.class),
                                          any(Boolean.class),
                                          anyLong());
    });
    verify(routingTable, never()).put(eq(globalCachedEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue_localEntry() throws Exception {
    // Lookup by participantId resolving to a local provider: the returned
    // DiscoveryEntryWithMetaInfo must carry isLocal == true.
    String participantId = "participantId";
    String interfaceName = "interfaceName";
    DiscoveryQos discoveryQos = new DiscoveryQos(Long.MAX_VALUE, Long.MAX_VALUE, DiscoveryScope.LOCAL_ONLY, false);
    // local DiscoveryEntry
    String localDomain = "localDomain";
    DiscoveryEntry localEntry = new DiscoveryEntry();
    localEntry.setDomain(localDomain);
    localEntry.setInterfaceName(interfaceName);
    localEntry.setParticipantId(participantId);
    DiscoveryEntryWithMetaInfo localEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                            localEntry);
    when(localDiscoveryEntryStoreMock.lookup(eq(participantId),
                                             eq(discoveryQos.getCacheMaxAge()))).thenReturn(Optional.of(localEntry));
    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedLocalEntry = (DiscoveryEntryWithMetaInfo) values[0];
    assertEquals(localEntryWithMetaInfo, capturedLocalEntry);
    // Local result: reference counted, never added as a routing table entry.
    verify(routingTable, times(1)).incrementReferenceCount(eq(capturedLocalEntry.getParticipantId()));
    verify(routingTable, never()).put(eq(capturedLocalEntry.getParticipantId()),
                                      any(Address.class),
                                      any(Boolean.class),
                                      anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue_cachedEntry() throws Exception {
    // Lookup by participantId resolving to a cached global provider: the returned
    // DiscoveryEntryWithMetaInfo must carry isLocal == false.
    String participantId = discoveryEntry.getParticipantId();
    String interfaceName = "interfaceName";
    // cached global DiscoveryEntry
    String globalDomain = "globalDomain";
    GlobalDiscoveryEntry cachedGlobalEntry = new GlobalDiscoveryEntry();
    cachedGlobalEntry.setDomain(globalDomain);
    cachedGlobalEntry.setInterfaceName(interfaceName);
    cachedGlobalEntry.setParticipantId(participantId);
    cachedGlobalEntry.setAddress(globalAddress1Serialized);
    DiscoveryEntryWithMetaInfo cachedGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                                   cachedGlobalEntry);
    when(globalDiscoveryEntryCacheMock.lookup(eq(participantId),
                                              eq(Long.MAX_VALUE))).thenReturn(Optional.of(cachedGlobalEntry));
    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedCachedGlobalEntry = (DiscoveryEntryWithMetaInfo) values[0];
    assertEquals(cachedGlobalEntryWithMetaInfo, capturedCachedGlobalEntry);
    // Cached (non-local) result: no reference counting; a routing table entry is
    // added with isGloballyVisible == true.
    verify(routingTable, never()).incrementReferenceCount(eq(capturedCachedGlobalEntry.getParticipantId()));
    verify(routingTable, times(1)).put(eq(capturedCachedGlobalEntry.getParticipantId()),
                                       any(Address.class),
                                       eq(true),
                                       anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localOnly_noLocalEntry_doesNotInvokeGcd_returnsNoEntryForParticipant() throws Exception {
    // LOCAL_ONLY lookup for a participantId with no local provider: the promise must
    // be rejected with NO_ENTRY_FOR_PARTICIPANT, and neither the GCD nor the routing
    // table may be touched.
    final long cacheMaxAge = 10000L;
    final long discoveryTimeout = 5000L;
    final String targetParticipantId = discoveryEntry.getParticipantId();
    final DiscoveryQos localOnlyQos = new DiscoveryQos(cacheMaxAge,
                                                       discoveryTimeout,
                                                       DiscoveryScope.LOCAL_ONLY,
                                                       false);

    Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(targetParticipantId,
                                                                         localOnlyQos,
                                                                         new String[0]);

    checkPromiseError(promise, DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verify(globalCapabilitiesDirectoryClient,
           never()).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                           anyString(),
                           anyLong(),
                           any());
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localOnly_localGlobalEntry_doesNotInvokeGcd_returnsLocalEntry() throws Exception {
    // LOCAL_ONLY + local entry present: GCD is skipped, the local entry is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_ONLY,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localThenGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // LOCAL_THEN_GLOBAL + no local entry: GCD is queried, the remote result is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                            /* localEntryAvailable */ false,
                                            /* invokesGcd */ true,
                                            /* returnsLocalEntry */ false);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localThenGlobal_localGlobalEntry_doesNotInvokeGcd_returnsLocalEntry() throws Exception {
    // LOCAL_THEN_GLOBAL + local entry present: GCD is skipped, the local entry is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localAndGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // LOCAL_AND_GLOBAL + no local entry: GCD is queried, the remote result is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_AND_GLOBAL,
                                            /* localEntryAvailable */ false,
                                            /* invokesGcd */ true,
                                            /* returnsLocalEntry */ false);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_localAndGlobal_localGlobalEntry_doesNotInvokeGcd_returnsLocalEntry() throws Exception {
    // LOCAL_AND_GLOBAL + local entry present: GCD is skipped, the local entry is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.LOCAL_AND_GLOBAL,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_globalOnly_localGlobalEntry_doesNotInvokeGcd_returnsLocalResult() throws Exception {
    // GLOBAL_ONLY + globally registered local entry: GCD is skipped, the local entry is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.GLOBAL_ONLY,
                                            /* localEntryAvailable */ true,
                                            /* invokesGcd */ false,
                                            /* returnsLocalEntry */ true);
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_globalOnly_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // GLOBAL_ONLY + no local entry: GCD is queried, the remote result is returned.
    lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope.GLOBAL_ONLY,
                                            /* localEntryAvailable */ false,
                                            /* invokesGcd */ true,
                                            /* returnsLocalEntry */ false);
}
/**
 * Parameterized helper for the lookup-by-participantId discovery-scope tests.
 *
 * @param discoveryScope      discovery scope to put into the DiscoveryQos
 * @param localEntryAvailable whether the provider is registered locally (in all gbids)
 *                            before the lookup
 * @param invokesGcd          expected: whether the GCD client is invoked exactly once
 * @param returnsLocalEntry   expected: whether the local (isLocal == true) entry is
 *                            returned instead of the global one
 */
private void lookupByParticipantIdDiscoveryScopeTest(DiscoveryScope discoveryScope,
                                                     boolean localEntryAvailable,
                                                     boolean invokesGcd,
                                                     boolean returnsLocalEntry) throws Exception {
    String participantId = discoveryEntry.getParticipantId();
    final long cacheMaxAge = 10000L;
    final long discoveryTimeout = 5000L;
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, discoveryScope, false);
    if (localEntryAvailable) {
        // register in all gbids
        Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
        checkPromiseSuccess(promiseAdd, "add failed");
        doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock).lookup(eq(participantId),
                                                                                        eq(Long.MAX_VALUE));
    }
    // GCD always answers with the global entry; whether it is actually consulted
    // depends on the discovery scope and the local availability.
    doAnswer(createLookupAnswer(globalDiscoveryEntry)).when(globalCapabilitiesDirectoryClient)
            .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
                    anyString(),
                    anyLong(),
                    any());
    Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
                                                                               discoveryQos,
                                                                               new String[0]);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedDiscoveryEntry = (DiscoveryEntryWithMetaInfo) values[0];
    if (invokesGcd) {
        verify(globalCapabilitiesDirectoryClient).lookup(any(), eq(participantId), eq(discoveryTimeout), any());
    } else {
        verify(globalCapabilitiesDirectoryClient, never()).lookup(any(), anyString(), anyLong(), any());
    }
    if (returnsLocalEntry) {
        // Local result: reference counted, no routing table put.
        DiscoveryEntryWithMetaInfo expectedLocalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
                                                                                                                     discoveryEntry);
        assertEquals(expectedLocalDiscoveryEntry, capturedDiscoveryEntry);
        verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalDiscoveryEntry.getParticipantId()));
        verify(routingTable, never()).put(eq(expectedLocalDiscoveryEntry.getParticipantId()),
                                          any(Address.class),
                                          any(Boolean.class),
                                          anyLong());
    } else {
        // Global result: no reference counting, routing table put as globally visible.
        DiscoveryEntryWithMetaInfo expectedGlobalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                                      globalDiscoveryEntry);
        assertEquals(expectedGlobalDiscoveryEntry, capturedDiscoveryEntry);
        verify(routingTable, never()).incrementReferenceCount(any());
        verify(routingTable, times(1)).put(eq(expectedGlobalDiscoveryEntry.getParticipantId()),
                                           any(Address.class),
                                           eq(true),
                                           anyLong());
    }
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_globalOnly_localOnlyEntry_doesNotInvokeGcd_noEntryForParticipant() throws Exception {
    final String participantId = discoveryEntry.getParticipantId();
    DiscoveryQos discoveryQos = new DiscoveryQos(10000L, 5000L, DiscoveryScope.GLOBAL_ONLY, false);

    // Register the provider with LOCAL scope only.
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    Promise<Add1Deferred> addPromise = localCapabilitiesDirectory.add(discoveryEntry, true, knownGbids);
    checkPromiseSuccess(addPromise, "add failed");

    // Forget the interactions caused by add() and re-stub the local store.
    reset(localDiscoveryEntryStoreMock, globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    doReturn(Optional.of(discoveryEntry)).when(localDiscoveryEntryStoreMock)
                                         .lookup(eq(participantId), eq(Long.MAX_VALUE));

    // GLOBAL_ONLY must not return the local-only entry; the lookup is
    // rejected without contacting cache or GCD.
    Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(participantId,
                                                                         discoveryQos,
                                                                         new String[0]);
    checkPromiseError(promise, DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);

    verify(localDiscoveryEntryStoreMock).lookup(eq(participantId), eq(Long.MAX_VALUE));
    verifyNoMoreInteractions(globalDiscoveryEntryCacheMock, globalCapabilitiesDirectoryClient);
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localOnly_noLocalEntry_doesNotInvokeGcd_returnsEmptyArray() throws Exception {
    String[] domains = { discoveryEntry.getDomain() };
    DiscoveryQos discoveryQos = new DiscoveryQos(10000L, 5000L, DiscoveryScope.LOCAL_ONLY, false);

    // The local store knows no entry for the domain/interface pair.
    doReturn(new ArrayList<DiscoveryEntry>()).when(localDiscoveryEntryStoreMock)
                                             .lookup(eq(domains), eq(INTERFACE_NAME));

    Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
                                                                         INTERFACE_NAME,
                                                                         discoveryQos,
                                                                         new String[0]);
    Object[] values = checkPromiseSuccess(promise, "lookup failed");
    DiscoveryEntryWithMetaInfo[] result = (DiscoveryEntryWithMetaInfo[]) values[0];
    assertEquals(0, result.length);

    // LOCAL_ONLY must never reach out to the GCD or touch the routing table.
    verify(globalCapabilitiesDirectoryClient,
           never()).lookup(any(), any(String[].class), anyString(), anyLong(), any());
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localOnly_localEntries_doesNotInvokeGcd_returnsLocalEntries() throws Exception {
    // Local entries exist; LOCAL_ONLY returns them without a GCD call.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_ONLY,
                                              true, // localEntriesAvailable
                                              false, // invokesGcd
                                              true); // returnsLocalEntry
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localThenGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // No local entries, so LOCAL_THEN_GLOBAL falls back to the GCD.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                              false, // localEntriesAvailable
                                              true, // invokesGcd
                                              false); // returnsLocalEntry
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localThenGlobal_localEntries_doesNotInvokeGcd_returnsLocalEntries() throws Exception {
    // Local entries exist; LOCAL_THEN_GLOBAL stops at the local store.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_THEN_GLOBAL,
                                              true, // localEntriesAvailable
                                              false, // invokesGcd
                                              true); // returnsLocalEntry
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_localAndGlobal_noLocalEntry_invokesGcd_returnsRemoteResult() throws Exception {
    // No local entries, so LOCAL_AND_GLOBAL must query the GCD.
    lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope.LOCAL_AND_GLOBAL,
                                              false, // localEntriesAvailable
                                              true, // invokesGcd
                                              false); // returnsLocalEntry
}
/**
 * Parameterized check of lookup(domains, interfaceName, qos, gbids) for a
 * given DiscoveryScope: optionally stubs a local store hit, stubs the GCD
 * client to answer with globalDiscoveryEntry, then verifies whether the GCD
 * was invoked and whether the local or the global entry was returned,
 * including the corresponding routing table interactions.
 */
private void lookupByDomainInterfaceDiscoveryScopeTest(DiscoveryScope discoveryScope,
boolean localEntriesAvailable,
boolean invokesGcd,
boolean returnsLocalEntry) throws Exception {
String[] domains = { discoveryEntry.getDomain() };
List<DiscoveryEntry> discoveryEntries = Arrays.asList(discoveryEntry);
List<GlobalDiscoveryEntry> globalDiscoveryEntries = Arrays.asList(globalDiscoveryEntry);
final long cacheMaxAge = 10000L;
final long discoveryTimeout = 5000L;
DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, discoveryTimeout, discoveryScope, false);
if (localEntriesAvailable) {
// let the local store report a matching entry for the domain/interface
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(INTERFACE_NAME))).thenReturn(discoveryEntries);
}
// the GCD client (if invoked) answers with the global entries
doAnswer(createLookupAnswer(globalDiscoveryEntries)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
Mockito.<String[]> any(),
anyString(),
anyLong(),
any());
Promise<Lookup2Deferred> lookupPromise = localCapabilitiesDirectory.lookup(domains,
INTERFACE_NAME,
discoveryQos,
new String[0]);
Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
DiscoveryEntryWithMetaInfo[] capturedDiscoveryEntries = (DiscoveryEntryWithMetaInfo[]) values[0];
if (invokesGcd) {
verify(globalCapabilitiesDirectoryClient).lookup(any(),
eq(domains),
eq(INTERFACE_NAME),
eq(discoveryTimeout),
any());
} else {
verify(globalCapabilitiesDirectoryClient,
never()).lookup(any(), any(String[].class), anyString(), anyLong(), any());
}
if (returnsLocalEntry) {
// local result: reference count incremented, no new routing entry
DiscoveryEntryWithMetaInfo expectedLocalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
discoveryEntry);
assertEquals(expectedLocalDiscoveryEntry, capturedDiscoveryEntries[0]);
verify(routingTable, times(1)).incrementReferenceCount(eq(expectedLocalDiscoveryEntry.getParticipantId()));
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
} else {
// global result: routing entry added, no reference count increment
DiscoveryEntryWithMetaInfo expectedGlobalDiscoveryEntry = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
globalDiscoveryEntry);
assertEquals(expectedGlobalDiscoveryEntry, capturedDiscoveryEntries[0]);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, times(1)).put(eq(expectedGlobalDiscoveryEntry.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
}
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByParticipantIdWithGbids_respectsCacheMaxAge() throws Exception {
    final long cacheMaxAge = 10000L;
    final String participantId = discoveryEntry.getParticipantId();
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, 5000L, DiscoveryScope.LOCAL_AND_GLOBAL, false);

    localCapabilitiesDirectory.lookup(participantId, discoveryQos, new String[0]);

    // The global cache must be queried with exactly the configured max age.
    verify(globalDiscoveryEntryCacheMock).lookup(eq(participantId), eq(cacheMaxAge));
}
@Test(timeout = TEST_TIMEOUT)
public void lookupByDomainInterfaceWithGbids_respectsCacheMaxAge() throws Exception {
    final long cacheMaxAge = 10000L;
    String[] domains = { discoveryEntry.getDomain() };
    DiscoveryQos discoveryQos = new DiscoveryQos(cacheMaxAge, 5000L, DiscoveryScope.LOCAL_AND_GLOBAL, false);

    localCapabilitiesDirectory.lookup(domains, INTERFACE_NAME, discoveryQos, new String[0]);

    // The global cache must be queried with exactly the configured max age.
    verify(globalDiscoveryEntryCacheMock).lookup(eq(domains), eq(INTERFACE_NAME), eq(cacheMaxAge));
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryEntryWithMetaInfoContainsExpectedIsLocalValue_globalEntry() throws Exception {
    // Fix: removed an unused local DiscoveryQos (it was created and its
    // scope set, but lookup(participantId) takes no qos argument, so it was
    // dead code).
    String participantId = "participantId";
    String interfaceName = "interfaceName";
    // remote global DiscoveryEntry
    String remoteGlobalDomain = "remoteglobaldomain";
    final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
                                                                            remoteGlobalDomain,
                                                                            interfaceName,
                                                                            participantId,
                                                                            new ProviderQos(),
                                                                            System.currentTimeMillis(),
                                                                            System.currentTimeMillis() + 10000L,
                                                                            "publicKeyId",
                                                                            globalAddress1Serialized);
    DiscoveryEntryWithMetaInfo remoteGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
                                                                                                                   remoteGlobalEntry);
    // GCD client answers the lookup with the remote entry
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            @SuppressWarnings("unchecked")
            Callback<GlobalDiscoveryEntry> callback = (Callback<GlobalDiscoveryEntry>) invocation.getArguments()[0];
            callback.onSuccess(remoteGlobalEntry);
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient)
      .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
              eq(participantId),
              anyLong(),
              eq(knownGbids));
    Promise<Lookup3Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId);
    Object[] values = checkPromiseSuccess(lookupPromise, "lookup failed");
    DiscoveryEntryWithMetaInfo capturedRemoteGlobalEntry = (DiscoveryEntryWithMetaInfo) values[0];
    // the returned entry must be marked as non-local (isLocal == false)
    assertEquals(remoteGlobalEntryWithMetaInfo, capturedRemoteGlobalEntry);
    // remote result: routing entry added, no reference count increment
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, times(1)).put(eq(remoteGlobalEntryWithMetaInfo.getParticipantId()),
                                       any(Address.class),
                                       any(Boolean.class),
                                       anyLong());
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantId_DiscoveryQosTtlIsUsed() throws Exception {
    final String participantId = "participantId";
    final String interfaceName = "interfaceName";
    final long discoveryTimeout = 1000000000;
    DiscoveryQos discoveryQos = new DiscoveryQos();
    discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
    discoveryQos.setDiscoveryTimeout(discoveryTimeout);

    // remote global DiscoveryEntry answered by the GCD client stub
    final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
                                                                            "remoteglobaldomain",
                                                                            interfaceName,
                                                                            participantId,
                                                                            new ProviderQos(),
                                                                            System.currentTimeMillis(),
                                                                            System.currentTimeMillis() + 10000L,
                                                                            "publicKeyId",
                                                                            globalAddress1Serialized);
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        Callback<GlobalDiscoveryEntry> callback = (Callback<GlobalDiscoveryEntry>) invocation.getArguments()[0];
        callback.onSuccess(remoteGlobalEntry);
        return null;
    }).when(globalCapabilitiesDirectoryClient)
      .lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
              eq(participantId),
              anyLong(),
              eq(knownGbids));

    Promise<Lookup4Deferred> lookupPromise = localCapabilitiesDirectory.lookup(participantId,
                                                                               discoveryQos,
                                                                               new String[0]);
    checkPromiseSuccess(lookupPromise, "lookup failed");

    // The discovery timeout from the DiscoveryQos must be forwarded to the GCD.
    verify(globalCapabilitiesDirectoryClient).lookup(any(), eq(participantId), eq(discoveryTimeout), any());
    verify(routingTable, never()).incrementReferenceCount(any());
    verify(routingTable, times(1)).put(eq(participantId), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Lookup over three domains with LOCAL_THEN_GLOBAL: one entry comes from the
 * local store (isLocal=true), one from the global cache and one from the GCD
 * (both isLocal=false). Verifies all three are returned with the correct
 * isLocal flag and the matching routing table interaction for each origin.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterface_DiscoveryEntriesWithMetaInfoContainExpectedIsLocalValue_localCachedAndGlobalEntries() throws InterruptedException {
String globalDomain = "globaldomain";
String remoteGlobalDomain = "remoteglobaldomain";
String[] domains = new String[]{ "localdomain", globalDomain, remoteGlobalDomain };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.LOCAL_THEN_GLOBAL);
// local DiscoveryEntry
DiscoveryEntry localEntry = new DiscoveryEntry();
localEntry.setParticipantId("participantIdLocal");
localEntry.setDomain(domains[0]);
DiscoveryEntryWithMetaInfo localEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(true,
localEntry);
when(localDiscoveryEntryStoreMock.lookup(eq(domains), eq(interfaceName))).thenReturn(Arrays.asList(localEntry));
// cached global DiscoveryEntry
GlobalDiscoveryEntry cachedGlobalEntry = new GlobalDiscoveryEntry();
cachedGlobalEntry.setParticipantId("participantIdCached");
cachedGlobalEntry.setInterfaceName(interfaceName);
cachedGlobalEntry.setDomain(globalDomain);
cachedGlobalEntry.setAddress(globalAddress1Serialized);
DiscoveryEntryWithMetaInfo cachedGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
cachedGlobalEntry);
doReturn(Arrays.asList(cachedGlobalEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(domains),
eq(interfaceName),
eq(discoveryQos.getCacheMaxAge()));
doReturn(Optional.of(cachedGlobalEntry)).when(globalDiscoveryEntryCacheMock)
.lookup(eq(cachedGlobalEntry.getParticipantId()), eq(Long.MAX_VALUE));
// remote global DiscoveryEntry
final GlobalDiscoveryEntry remoteGlobalEntry = new GlobalDiscoveryEntry(new Version(0, 0),
remoteGlobalDomain,
interfaceName,
"participantIdRemote",
new ProviderQos(),
System.currentTimeMillis(),
System.currentTimeMillis() + 10000L,
"publicKeyId",
globalAddress1Serialized);
DiscoveryEntryWithMetaInfo remoteGlobalEntryWithMetaInfo = CapabilityUtils.convertToDiscoveryEntryWithMetaInfo(false,
remoteGlobalEntry);
doAnswer(createLookupAnswer(Arrays.asList(remoteGlobalEntry))).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
eq(knownGbids));
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
Object[] values = checkPromiseSuccess(promise, "lookup failed");
List<DiscoveryEntryWithMetaInfo> capabilities = Arrays.asList((DiscoveryEntryWithMetaInfo[]) values[0]);
assertEquals(3, capabilities.size());
// local entry: reference count incremented, no routing entry added
assertTrue(capabilities.contains(localEntryWithMetaInfo));
verify(routingTable, times(1)).incrementReferenceCount(eq(localEntryWithMetaInfo.getParticipantId()));
verify(routingTable, never()).put(eq(localEntryWithMetaInfo.getParticipantId()),
any(Address.class),
any(Boolean.class),
anyLong());
// cached global entry: routing entry added (globally visible)
assertTrue(capabilities.contains(cachedGlobalEntryWithMetaInfo));
verify(routingTable, never()).incrementReferenceCount(eq(cachedGlobalEntryWithMetaInfo.getParticipantId()));
verify(routingTable, times(1)).put(eq(cachedGlobalEntryWithMetaInfo.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
// remote global entry: routing entry added (globally visible)
assertTrue(capabilities.contains(remoteGlobalEntryWithMetaInfo));
verify(routingTable, never()).incrementReferenceCount(eq(remoteGlobalEntryWithMetaInfo.getParticipantId()));
verify(routingTable, times(1)).put(eq(remoteGlobalEntryWithMetaInfo.getParticipantId()),
any(Address.class),
eq(true),
anyLong());
}
/**
 * A JoynrRuntimeException from the GCD client must surface as a
 * ProviderRuntimeException rejection of the lookup promise; the routing
 * table must remain untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_exception() throws InterruptedException {
String domain = "domain";
String[] domains = new String[]{ domain };
String interfaceName = "interface";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
JoynrRuntimeException exception = new JoynrRuntimeException("lookup failed");
ProviderRuntimeException expectedException = new ProviderRuntimeException(exception.toString());
// the GCD client fails the lookup with a runtime exception
doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
interfaceName,
discoveryQos,
knownGbids);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseException(promise, expectedException);
}
/**
 * Stubs the GCD client to reject a lookup(domains, interface, qos, gbids)
 * with the given DiscoveryError and checks that the promise is rejected with
 * the same error (as an ApplicationException) and that the routing table is
 * not modified.
 */
private void testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String domain = "domain";
String[] domains = new String[]{ domain };
String interfaceName = "interface";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
interfaceName,
discoveryQos,
knownGbids);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseError(promise, expectedError);
}
/**
 * Same as the gbids variant, but for the lookup(domains, interface, qos)
 * overload: here the DiscoveryError is expected to be wrapped in a
 * ProviderRuntimeException (checked via its message).
 */
private void testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String domain = "domain";
String[] domains = new String[]{ domain };
String interfaceName = "interface";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup1Deferred> promise = localCapabilitiesDirectory.lookup(domains, interfaceName, discoveryQos);
verify(globalCapabilitiesDirectoryClient).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
eq(domains),
eq(interfaceName),
anyLong(),
ArgumentMatchers.<String[]> any());
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseErrorInProviderRuntimeException(promise, expectedError);
}
// Parameterized rejection cases for lookup by domain/interface, one test per
// DiscoveryError value, delegating to the two helpers above.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_internalError() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByDomainInterfaceIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_internalError() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbidsIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByDomainInterfaceWithGbidsIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
/**
 * A JoynrRuntimeException from the GCD client during a lookup by
 * participantId must surface as a ProviderRuntimeException rejection; the
 * routing table must remain untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_exception() throws InterruptedException {
String participantId = "participantId";
DiscoveryQos discoveryQos = new DiscoveryQos();
discoveryQos.setDiscoveryScope(DiscoveryScope.GLOBAL_ONLY);
JoynrRuntimeException exception = new JoynrRuntimeException("lookup failed");
ProviderRuntimeException expectedException = new ProviderRuntimeException(exception.toString());
// the GCD client fails the lookup with a runtime exception
doAnswer(createVoidAnswerWithException(exception)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(participantId, discoveryQos, knownGbids);
checkPromiseException(promise, expectedException);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Stubs the GCD client to reject a lookup(participantId, qos, gbids) with
 * the given DiscoveryError; expects the promise to be rejected with that
 * error and the routing table to stay unchanged.
 */
private void testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String participantId = "participantId";
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
ArgumentMatchers.<String[]> any());
DiscoveryQos discoveryQos = new DiscoveryQos(10000L, 500L, DiscoveryScope.LOCAL_AND_GLOBAL, false);
Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(participantId, discoveryQos, knownGbids);
checkPromiseError(promise, expectedError);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Same as the gbids variant, but for the lookup(participantId) overload:
 * here the DiscoveryError is expected to be wrapped in a
 * ProviderRuntimeException (checked via its message).
 */
private void testLookupByParticipantIdIsProperlyRejected(DiscoveryError expectedError) throws InterruptedException {
String participantId = "participantId";
doAnswer(createVoidAnswerWithDiscoveryError(expectedError)).when(globalCapabilitiesDirectoryClient)
.lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
eq(participantId),
anyLong(),
ArgumentMatchers.<String[]> any());
Promise<Lookup3Deferred> promise = localCapabilitiesDirectory.lookup(participantId);
checkPromiseErrorInProviderRuntimeException(promise, expectedError);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
// Parameterized rejection cases for lookup by participantId, one test per
// DiscoveryError value, delegating to the two helpers above.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_internalError() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdIsProperlyRejected_noEntryForParticipant() throws InterruptedException {
testLookupByParticipantIdIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_invalidGbid() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_unknownGbid() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_internalError() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.INTERNAL_ERROR);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_noEntryForSelectedBackend() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_SELECTED_BACKENDS);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbidsIsProperlyRejected_noEntryForParticipant() throws InterruptedException {
testLookupByParticipantIdWithGbidsIsProperlyRejected(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
}
// GBID validation cases: unknown, empty, duplicate, null element and null
// array must all be rejected locally (UNKNOWN_GBID / INVALID_GBID) without
// ever contacting the GCD — see the two driver helpers below.
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_unknownGbids() throws InterruptedException {
String[] gbids = new String[]{ "not", "known" };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_unknownGbids() throws InterruptedException {
String[] gbids = new String[]{ "not", "known" };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.UNKNOWN_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_emptyGbid() throws InterruptedException {
String[] gbids = new String[]{ "" };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_emptyGbid() throws InterruptedException {
String[] gbids = new String[]{ "" };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_duplicateGbid() throws InterruptedException {
String[] gbids = new String[]{ knownGbids[1], knownGbids[0], knownGbids[1] };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_duplicateGbid() throws InterruptedException {
String[] gbids = new String[]{ knownGbids[1], knownGbids[0], knownGbids[1] };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_nullGbid() throws InterruptedException {
String[] gbids = new String[]{ null };
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_nullGbid() throws InterruptedException {
String[] gbids = new String[]{ null };
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByDomainInterfaceWithGbids_invalidGbid_nullGbidArray() throws InterruptedException {
String[] gbids = null;
testLookupByDomainInterfaceWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
@Test(timeout = TEST_TIMEOUT)
public void testLookupByParticipantIdWithGbids_invalidGbid_nullGbidArray() throws InterruptedException {
String[] gbids = null;
testLookupByParticipantIdWithDiscoveryError(gbids, DiscoveryError.INVALID_GBID);
}
/**
 * Calls lookup(domains, interface, qos, gbids) with the given (invalid)
 * gbids and expects the promise to be rejected with the given error, with no
 * GCD call and no routing table change.
 */
private void testLookupByDomainInterfaceWithDiscoveryError(String[] gbids,
DiscoveryError expectedError) throws InterruptedException {
String[] domains = new String[]{ "domain1", "domain2" };
String interfaceName = "interfaceName";
DiscoveryQos discoveryQos = new DiscoveryQos();
Promise<Lookup2Deferred> promise = localCapabilitiesDirectory.lookup(domains,
interfaceName,
discoveryQos,
gbids);
// gbid validation fails locally, so the GCD must never be contacted
verify(globalCapabilitiesDirectoryClient,
never()).lookup(ArgumentMatchers.<CallbackWithModeledError<List<GlobalDiscoveryEntry>, DiscoveryError>> any(),
any(String[].class),
anyString(),
anyLong(),
any(String[].class));
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
checkPromiseError(promise, expectedError);
}
/**
 * Calls lookup(participantId, qos, gbids) with the given (invalid) gbids and
 * expects the promise to be rejected with the given error, with no GCD call
 * and no routing table change.
 */
private void testLookupByParticipantIdWithDiscoveryError(String[] gbids,
DiscoveryError expectedError) throws InterruptedException {
String participantId = "participantId";
Promise<Lookup4Deferred> promise = localCapabilitiesDirectory.lookup(participantId, new DiscoveryQos(), gbids);
// gbid validation fails locally, so the GCD must never be contacted
verify(globalCapabilitiesDirectoryClient,
never()).lookup(ArgumentMatchers.<CallbackWithModeledError<GlobalDiscoveryEntry, DiscoveryError>> any(),
anyString(),
anyLong(),
any(String[].class));
checkPromiseError(promise, expectedError);
verify(routingTable, never()).incrementReferenceCount(any());
verify(routingTable, never()).put(anyString(), any(Address.class), any(Boolean.class), anyLong());
}
/**
 * Awaits rejection of the given promise and asserts that the rejection
 * exception is of the expected type and equal to the expected exception.
 * Fails on fulfillment or when no rejection arrives within
 * DEFAULT_WAIT_TIME_MS.
 */
private static void checkPromiseException(Promise<?> promise,
                                          Exception expectedException) throws InterruptedException {
    final CountDownLatch rejected = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onFulfillment(Object... values) {
            fail("Unexpected fulfillment when expecting rejection.");
        }

        @Override
        public void onRejection(JoynrException exception) {
            assertTrue(expectedException.getClass().isInstance(exception));
            assertEquals(expectedException, exception);
            rejected.countDown();
        }
    });
    assertTrue(rejected.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
}
/**
 * Waits for the given promise to be rejected with an ApplicationException carrying
 * exactly the expected DiscoveryError. Fails on fulfillment, on any other rejection
 * type, or when no rejection arrives within DEFAULT_WAIT_TIME_MS.
 */
private static void checkPromiseError(Promise<?> promise,
                                      DiscoveryError expectedError) throws InterruptedException {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onRejection(JoynrException exception) {
            if (exception instanceof ApplicationException) {
                DiscoveryError error = ((ApplicationException) exception).getError();
                assertEquals(expectedError, error);
                countDownLatch.countDown();
            } else {
                fail("Did not receive an ApplicationException on rejection.");
            }
        }

        @Override
        public void onFulfillment(Object... values) {
            fail("Unexpected fulfillment when expecting rejection.");
        }
    });
    assertTrue(countDownLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
}
/**
 * Waits for the given promise to be rejected with a ProviderRuntimeException whose
 * message contains the name of the expected DiscoveryError. Fails on fulfillment, on
 * any other rejection type, or when no rejection arrives within DEFAULT_WAIT_TIME_MS.
 */
private static void checkPromiseErrorInProviderRuntimeException(Promise<?> promise,
                                                                DiscoveryError expectedError) throws InterruptedException {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onRejection(JoynrException exception) {
            if (exception instanceof ProviderRuntimeException) {
                assertTrue(((ProviderRuntimeException) exception).getMessage().contains(expectedError.name()));
                countDownLatch.countDown();
            } else {
                fail("Did not receive a ProviderRuntimeException on rejection.");
            }
        }

        @Override
        public void onFulfillment(Object... values) {
            fail("Unexpected fulfillment when expecting rejection.");
        }
    });
    assertTrue(countDownLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
}
/**
 * Waits for the given promise to be fulfilled and returns its fulfillment values.
 * Fails with the given message on rejection or when fulfillment does not arrive
 * within DEFAULT_WAIT_TIME_MS.
 */
private static Object[] checkPromiseSuccess(Promise<? extends AbstractDeferred> promise,
                                            String onRejectionMessage) throws InterruptedException {
    List<Object> fulfillmentValues = new ArrayList<>();
    CountDownLatch fulfillmentLatch = new CountDownLatch(1);
    promise.then(new PromiseListener() {
        @Override
        public void onFulfillment(Object... values) {
            fulfillmentValues.addAll(Arrays.asList(values));
            fulfillmentLatch.countDown();
        }

        @Override
        public void onRejection(JoynrException error) {
            fail(onRejectionMessage + ": " + error);
        }
    });
    assertTrue(onRejectionMessage + ": promise timeout",
               fulfillmentLatch.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    return fulfillmentValues.toArray(new Object[0]);
}
/**
 * Verifies that removing a globally registered provider triggers a GCD remove call and
 * that the local store is only cleaned up after the GCD remove has completed (never the
 * global cache, never the routing table).
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_globallyRegistered_GcdCalled() throws InterruptedException {
    when(globalAddressProvider.get()).thenReturn(new MqttAddress("testgbid", "testtopic"));

    // register the provider globally first so that there is something to remove
    boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> addPromise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(addPromise, "add failed");

    // delay the GCD remove so the in-flight state can be observed
    CountDownLatch cdlStart = new CountDownLatch(1);
    CountDownLatch cdlDone = new CountDownLatch(1);
    doAnswer(createAnswerWithDelayedSuccess(cdlStart,
                                            cdlDone,
                                            1500)).when(globalCapabilitiesDirectoryClient)
                                                  .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                          eq(globalDiscoveryEntry.getParticipantId()),
                                                          any(String[].class));
    when(localDiscoveryEntryStoreMock.lookup(discoveryEntry.getParticipantId(),
                                             Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry));
    localCapabilitiesDirectory.remove(globalDiscoveryEntry.getParticipantId());

    // while the GCD remove is still running, nothing may be removed locally yet
    assertTrue(cdlStart.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                     eq(discoveryEntry.getParticipantId()),
                                                     any(String[].class));
    verify(localDiscoveryEntryStoreMock, times(0)).remove(any(String.class));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(any(String.class));

    // once the GCD remove succeeded, the entry is removed from the local store only
    assertTrue(cdlDone.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(discoveryEntry.getParticipantId()));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
}
/**
 * Verifies that removing a provider with LOCAL scope removes it from the local store
 * without contacting the global capabilities directory, the global cache or the
 * routing table.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_localProvider_GcdNotCalled() throws InterruptedException {
    discoveryEntry.getQos().setScope(ProviderScope.LOCAL);
    boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> addPromise = localCapabilitiesDirectory.add(discoveryEntry, awaitGlobalRegistration);
    checkPromiseSuccess(addPromise, "add failed");

    when(localDiscoveryEntryStoreMock.lookup(discoveryEntry.getParticipantId(),
                                             Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry));
    localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());

    // remove is processed asynchronously; give the worker time before verifying
    Thread.sleep(500);

    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient,
           times(0)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                            anyString(),
                            any(String[].class));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
    verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(discoveryEntry.getParticipantId()));
}
/**
 * Verifies that removing an unknown participant (no local entry) does not touch the
 * GCD, the local store, the global cache or the routing table.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_participantNotRegisteredNoGbids_GcdNotCalled() throws InterruptedException {
    String participantId = "unknownparticipantId";
    CountDownLatch cdl = new CountDownLatch(1);
    doReturn(Optional.empty()).when(localDiscoveryEntryStoreMock).lookup(eq(participantId), anyLong());
    localCapabilitiesDirectory.remove(participantId);
    // the latch is never counted down; awaiting it simply gives the asynchronous
    // remove handling time to run before the never()-verifications below
    assertFalse(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient,
           never()).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                           any(String.class),
                           any(String[].class));
    verify(localDiscoveryEntryStoreMock, never()).remove(any(String.class));
    verify(globalDiscoveryEntryCacheMock, never()).remove(any(String.class));
}
/**
 * Verifies that removing the provisioned participant (no entry in the local store) is
 * still forwarded to the GCD and that the local store remove is invoked afterwards.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_participantNotRegisteredGbidsMapped_GcdCalled() throws InterruptedException {
    // this test assumes that the participant gets registered by a queued add task after enqueuing the remove task
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                                                  any(String[].class));
    // simulate "not (yet) present" in the local store
    doReturn(Optional.empty()).when(localDiscoveryEntryStoreMock)
                              .lookup(eq(provisionedGlobalDiscoveryEntry.getParticipantId()), anyLong());
    localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient,
           times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                            eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                            any(String[].class));
    verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(provisionedGlobalDiscoveryEntry.getParticipantId()));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
}
/**
 * Verifies that a GCD remove failing with JoynrTimeoutException is retried (at least
 * two remove attempts) and that neither the local store nor the global cache is
 * modified while the remove keeps failing.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_TimeoutException() throws InterruptedException {
    // latch counts 2: the initial attempt plus at least one retry
    CountDownLatch cdl = new CountDownLatch(2);
    doAnswer(createVoidAnswerWithException(cdl,
                                           new JoynrTimeoutException(0))).when(globalCapabilitiesDirectoryClient)
                                                                         .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                                 eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                                                                                 any(String[].class));
    localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient,
           atLeast(2)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                              eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                              any(String[].class));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
/**
 * Verifies that a GCD remove failing with a non-timeout exception
 * (JoynrCommunicationException) is not retried and leaves local store and global
 * cache untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_NonTimeoutException() throws InterruptedException {
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createVoidAnswerWithException(cdl,
                                           new JoynrCommunicationException())).when(globalCapabilitiesDirectoryClient)
                                                                              .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                                      eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                                                                                      any(String[].class));
    localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    // exactly one attempt: non-timeout failures must not trigger a retry
    verify(globalCapabilitiesDirectoryClient,
           times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                            eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                            any(String[].class));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
/**
 * Verifies that a GCD remove rejected with NO_ENTRY_FOR_PARTICIPANT (representative
 * also for NO_ENTRY_FOR_SELECTED_BACKENDS) removes the entry from the local store
 * while leaving the global cache untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_DiscoveryError_NoEntry() throws InterruptedException {
    // covers NO_ENTRY_FOR_PARTICIPANT as well as NO_ENTRY_FOR_SELECTED_BACKENDS
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createVoidAnswerWithDiscoveryError(cdl,
                                                DiscoveryError.NO_ENTRY_FOR_PARTICIPANT)).when(globalCapabilitiesDirectoryClient)
                                                                                         .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                                                 eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                                                                                                 any(String[].class));
    localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient,
           times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                            eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                            any(String[].class));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
    // entry absent in the backend: the local entry is still removed
    verify(localDiscoveryEntryStoreMock, times(1)).remove(eq(provisionedGlobalDiscoveryEntry.getParticipantId()));
}
/**
 * Verifies that a GCD remove rejected with INVALID_GBID (representative also for
 * UNKNOWN_GBID and INTERNAL_ERROR) is not retried and leaves local store and global
 * cache untouched.
 */
@Test(timeout = TEST_TIMEOUT)
public void remove_FailureStates_DiscoveryError_InvalidGbid() throws InterruptedException {
    // Also covers UNKNOWN_GBID and INTERNAL_ERROR
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createVoidAnswerWithDiscoveryError(cdl,
                                                DiscoveryError.INVALID_GBID)).when(globalCapabilitiesDirectoryClient)
                                                                             .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                                                     eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                                                                                     any(String[].class));
    localCapabilitiesDirectory.remove(provisionedGlobalDiscoveryEntry.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verifyNoMoreInteractions(routingTable);
    verify(globalCapabilitiesDirectoryClient,
           times(1)).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                            eq(provisionedGlobalDiscoveryEntry.getParticipantId()),
                            any(String[].class));
    verify(globalDiscoveryEntryCacheMock, times(0)).remove(anyString());
    verify(localDiscoveryEntryStoreMock, times(0)).remove(anyString());
}
/**
 * Helper: globally registers a provider with the given GBID selection, removes it
 * again and verifies that the GCD remove is invoked with exactly the same GBIDs in
 * the same order as the add.
 */
private void testRemoveUsesSameGbidOrderAsAdd(String[] selectedGbids) throws InterruptedException {
    String[] expectedGbids = selectedGbids.clone();
    // unique participantId per gbid combination to avoid clashes between invocations
    String participantId = LocalCapabilitiesDirectoryTest.class.getName() + ".removeUsesSameGbidOrderAsAdd."
            + Arrays.toString(selectedGbids);
    String domain = "testDomain";
    ProviderQos providerQos = new ProviderQos();
    providerQos.setScope(ProviderScope.GLOBAL);
    globalDiscoveryEntry = new GlobalDiscoveryEntry(new Version(47, 11),
                                                    domain,
                                                    INTERFACE_NAME,
                                                    participantId,
                                                    providerQos,
                                                    System.currentTimeMillis(),
                                                    expiryDateMs,
                                                    publicKeyId,
                                                    globalAddress1Serialized);

    boolean awaitGlobalRegistration = true;
    Promise<Add1Deferred> promise = localCapabilitiesDirectory.add(globalDiscoveryEntry,
                                                                   awaitGlobalRegistration,
                                                                   selectedGbids);
    checkPromiseSuccess(promise, "add failed in testRemoveUsesSameGbidOrderAsAdd");

    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  any(String.class),
                                                  any(String[].class));
    when(localDiscoveryEntryStoreMock.lookup(globalDiscoveryEntry.getParticipantId(),
                                             Long.MAX_VALUE)).thenReturn(Optional.of(globalDiscoveryEntry));
    localCapabilitiesDirectory.remove(globalDiscoveryEntry.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));

    // the remove must target exactly the gbids selected at add time, in the same order
    verify(globalCapabilitiesDirectoryClient).remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                     any(String.class),
                                                     eq(expectedGbids));
    verifyNoMoreInteractions(routingTable);
}
/**
 * Runs the remove-uses-same-GBID-order check for several GBID selections: each of the
 * two known gbids alone, plus both combined in either order.
 */
@Test(timeout = TEST_TIMEOUT)
public void testRemoveUsesSameGbidOrderAsAdd() throws InterruptedException {
    String[][] gbidSelections = new String[][]{ { knownGbids[0] },
                                                { knownGbids[1] },
                                                { knownGbids[0], knownGbids[1] },
                                                { knownGbids[1], knownGbids[0] } };
    for (String[] selectedGbids : gbidSelections) {
        testRemoveUsesSameGbidOrderAsAdd(selectedGbids);
    }
}
/**
 * Verifies that the GcdTaskSequencer does not crash when taskFinished() is invoked
 * manually while a remove task is still being processed: the task's own callback must
 * never fire, and the sequencer must stay alive and process subsequent tasks.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerDoesNotCrashOnExceptionAfterRemoveTaskFinished() throws InterruptedException,
                                                                          IllegalAccessException {
    /// We have to add before we can remove anything
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         any(),
                         anyLong(),
                         eq(expectedGbids));
    reset(globalCapabilitiesDirectoryClient);
    ///
    ///The real test starts here
    CountDownLatch cdl1 = new CountDownLatch(1);
    CountDownLatch cdl2 = new CountDownLatch(1);
    AtomicBoolean cbCalled = new AtomicBoolean();
    // callback only records that it was invoked; taskFinished is called manually below
    GcdTask.CallbackCreator callbackCreator = new GcdTask.CallbackCreator() {
        @Override
        public CallbackWithModeledError<Void, DiscoveryError> createCallback() {
            return new CallbackWithModeledError<Void, DiscoveryError>() {
                @Override
                public void onFailure(DiscoveryError errorEnum) {
                    // taskFinished is called manually
                    logger.error("onFailure callback called, DiscoveryError {}", errorEnum);
                    cbCalled.set(true);
                }

                @Override
                public void onFailure(JoynrRuntimeException runtimeException) {
                    // taskFinished is called manually
                    logger.error("onFailure callback called:", runtimeException);
                    cbCalled.set(true);
                }

                @Override
                public void onSuccess(Void result) {
                    // taskFinished is called manually
                    logger.error("onSuccess callback called");
                    cbCalled.set(true);
                }
            };
        }
    };
    // remove task that blocks the sequencer thread until the test releases cdl2
    class TestGcdRemoveTask extends GcdTask {
        public TestGcdRemoveTask(CallbackCreator callbackCreator, String participantId) {
            super(MODE.REMOVE, callbackCreator, participantId, null, null, 0L, true);
        }

        @Override
        public String getParticipantId() {
            cdl1.countDown();
            try {
                // block GcdTaskSequencer until taskFinished has been called
                cdl2.await();
            } catch (InterruptedException e) {
                // ignore
            }
            return super.getParticipantId();
        }
    }
    TestGcdRemoveTask task = new TestGcdRemoveTask(callbackCreator, globalDiscoveryEntry.getParticipantId());
    gcdTaskSequencer.addTask(task);
    assertTrue(cdl1.await(DEFAULT_WAIT_TIME_MS * 100, TimeUnit.MILLISECONDS));
    // call taskFinished while task is processed
    gcdTaskSequencer.taskFinished();
    cdl2.countDown();
    verify(globalCapabilitiesDirectoryClient,
           timeout(1000).times(1)).remove(any(), eq(globalDiscoveryEntry.getParticipantId()), eq(expectedGbids));
    // check that GcdTaskSequencer is still alive
    localCapabilitiesDirectory.addToAll(discoveryEntry, awaitGlobalRegistration);
    verify(globalCapabilitiesDirectoryClient, timeout(1000).times(1)).add(any(),
                                                                          any(),
                                                                          anyLong(),
                                                                          eq(expectedGbids));
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient);
    assertFalse(cbCalled.get());
}
/**
 * Verifies that after a successful GCD remove the sequencer task is finished exactly
 * once and that duplicate/stale callback invocations afterwards trigger no further
 * sequencer or GCD client interactions.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterRemoveSuccess() throws InterruptedException, IllegalAccessException {
    /// We have to add before we can remove anything
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         any(),
                         anyLong(),
                         eq(expectedGbids));
    reset(globalCapabilitiesDirectoryClient);
    ///
    ///The real test starts here
    // observe the sequencer through a spy injected into the directory
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
    localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.REMOVE.equals(arg.getMode())));
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).remove(callbackCaptor.capture(),
                                                               eq(discoveryEntry.getParticipantId()),
                                                               eq(expectedGbids));
    callbackCaptor.getValue().onSuccess(null);
    verify(gcdTaskSequencerSpy).taskFinished();
    // stale invocations after the first onSuccess must all be no-ops
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * Verifies that a GCD remove is retried exclusively on JoynrTimeoutException: the first
 * timeout triggers retryTask() and a second remove attempt, while a subsequent
 * non-timeout failure finishes the task; stale callback invocations in between must be
 * ignored (checked via verifyNoMoreInteractions at the end).
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerRetriesRemoveOnJoynrTimeoutExceptionOnly() throws InterruptedException,
                                                                    IllegalAccessException {
    ///We need to add before we can remove
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         any(),
                         anyLong(),
                         eq(expectedGbids));
    reset(globalCapabilitiesDirectoryClient);
    ///
    ///The real test starts here
    // observe the sequencer through a spy injected into the directory
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
    localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.REMOVE.equals(arg.getMode())));
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).remove(callbackCaptor.capture(),
                                                               eq(discoveryEntry.getParticipantId()),
                                                               eq(expectedGbids));
    // second latch observes the retry attempt
    CountDownLatch cdl2 = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl2.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
    // timeout triggers a retry ...
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    verify(gcdTaskSequencerSpy).retryTask();
    // ... and further invocations of this (first) callback must be no-ops
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    assertTrue(cdl2.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(2)).remove(callbackCaptor.capture(),
                                                               eq(discoveryEntry.getParticipantId()),
                                                               eq(expectedGbids));
    // non-timeout failure on the retry finishes the task (no further retry)
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    verify(gcdTaskSequencerSpy).taskFinished();
    // After handling a non-timeout exception, the callback is 'disabled'
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * Verifies that a GCD remove rejected with a DiscoveryError finishes the sequencer
 * task exactly once and that subsequent (stale) callback invocations trigger no
 * further sequencer or GCD client interactions.
 */
@Test(timeout = TEST_TIMEOUT)
public void taskSequencerNotReleasedAfterRemoveDiscoveryError() throws InterruptedException,
                                                                IllegalAccessException {
    ///We need to add before we can remove
    String[] expectedGbids = knownGbids.clone();
    final boolean awaitGlobalRegistration = true;
    Promise<AddToAllDeferred> promise = localCapabilitiesDirectory.addToAll(discoveryEntry,
                                                                            awaitGlobalRegistration);
    checkPromiseSuccess(promise, "add failed");
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         any(),
                         anyLong(),
                         eq(expectedGbids));
    reset(globalCapabilitiesDirectoryClient);
    ///
    ///The real test starts here
    // observe the sequencer through a spy injected into the directory
    setFieldValue(localCapabilitiesDirectory, "gcdTaskSequencer", gcdTaskSequencerSpy);
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            cdl.countDown();
            return null;
        }
    }).when(globalCapabilitiesDirectoryClient).remove(any(), any(), any());
    localCapabilitiesDirectory.remove(discoveryEntry.getParticipantId());
    verify(gcdTaskSequencerSpy).addTask(argThat(arg -> GcdTask.MODE.REMOVE.equals(arg.getMode())));
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).remove(callbackCaptor.capture(),
                                                               eq(discoveryEntry.getParticipantId()),
                                                               eq(expectedGbids));
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verify(gcdTaskSequencerSpy).taskFinished();
    // stale invocations after the task finished must all be no-ops
    callbackCaptor.getValue().onFailure(new JoynrTimeoutException(12345));
    callbackCaptor.getValue().onSuccess(null);
    callbackCaptor.getValue().onFailure(new JoynrRuntimeException());
    callbackCaptor.getValue().onFailure(DiscoveryError.NO_ENTRY_FOR_PARTICIPANT);
    verifyNoMoreInteractions(globalCapabilitiesDirectoryClient, gcdTaskSequencerSpy);
}
/**
 * Verifies that the periodic freshness-update runnable touches the entries in the
 * local store, updates the global cache with the same timestamps, and issues one GCD
 * touch call for the touched participant ids.
 */
@Test(timeout = TEST_TIMEOUT)
public void callTouchForGlobalParticipantIds() throws InterruptedException {
    final String participantId1 = "participantId1";
    final String participantId2 = "participantId2";
    // allowed deviation between expected and captured timestamps
    final long toleranceMs = freshnessUpdateIntervalMs * 2 / 3;
    GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    entry1.getQos().setScope(ProviderScope.GLOBAL);
    entry1.setParticipantId(participantId1);
    GlobalDiscoveryEntry entry2 = new GlobalDiscoveryEntry(entry1);
    entry2.setParticipantId(participantId2);
    Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(entry1, true);
    Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(entry2, true);
    checkPromiseSuccess(promiseAdd1, "add failed");
    checkPromiseSuccess(promiseAdd2, "add failed");
    ArgumentCaptor<Long> lastSeenDateCaptor = ArgumentCaptor.forClass(Long.class);
    ArgumentCaptor<Long> expiryDateCaptor = ArgumentCaptor.forClass(Long.class);
    String[] touchedParticipantIds = new String[]{ participantId1, participantId2 };
    String[] expectedParticipantIds = touchedParticipantIds.clone();
    when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
                                                            anyLong())).thenReturn(touchedParticipantIds);
    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(TimeUnit.MILLISECONDS));
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .touch(ArgumentMatchers.<Callback<Void>> any(),
                                                 eq(expectedParticipantIds),
                                                 anyString());
    Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
    final long expectedLastSeenDateMs = System.currentTimeMillis();
    final long expectedExpiryDateMs = expectedLastSeenDateMs + DEFAULT_EXPIRY_TIME_MS;
    Runnable runnable = runnableCaptor.getValue();
    runnable.run();
    verify(localDiscoveryEntryStoreMock, times(1)).touchDiscoveryEntries(lastSeenDateCaptor.capture(),
                                                                         expiryDateCaptor.capture());
    assertTrue(Math.abs(lastSeenDateCaptor.getValue() - expectedLastSeenDateMs) <= toleranceMs);
    assertTrue(Math.abs(expiryDateCaptor.getValue() - expectedExpiryDateMs) <= toleranceMs);
    // cache must be updated with exactly the timestamps applied to the local store
    verify(globalDiscoveryEntryCacheMock, times(1)).touchDiscoveryEntries(eq(expectedParticipantIds),
                                                                          eq(lastSeenDateCaptor.getValue()),
                                                                          eq(expiryDateCaptor.getValue()));
    // NOTE(review): the following two asserts repeat the checks above on the same
    // captured values and are redundant
    assertTrue(Math.abs(lastSeenDateCaptor.getValue() - expectedLastSeenDateMs) <= toleranceMs);
    assertTrue(Math.abs(expiryDateCaptor.getValue() - expectedExpiryDateMs) <= toleranceMs);
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              eq(expectedParticipantIds),
                                                              anyString());
}
/**
 * Verifies that the periodic freshness-update runnable skips the GCD touch call
 * entirely when the local store reports no participant ids to touch.
 */
@Test
public void touchNotCalled_noParticipantIdsToTouch() throws InterruptedException {
    String[] emptyParticipantIdList = new String[0];
    when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
                                                            anyLong())).thenReturn(emptyParticipantIdList);
    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(TimeUnit.MILLISECONDS));
    runnableCaptor.getValue().run();
    verify(globalCapabilitiesDirectoryClient, times(0)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              any(),
                                                              anyString());
}
/**
 * Verifies that one single GCD touch call covers multiple participant ids that were
 * all registered for the same (single) GBID.
 */
@Test
public void touchCalledOnce_multipleParticipantIdsForSingleGbid() throws InterruptedException {
    String participantId1 = "participantId1";
    String participantId2 = "participantId2";
    String gbid = knownGbids[1];
    String[] gbids = { gbid };

    GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    entry1.getQos().setScope(ProviderScope.GLOBAL);
    entry1.setParticipantId(participantId1);
    // entries start out stale so the touch has something to refresh
    entry1.setExpiryDateMs(0L);
    entry1.setLastSeenDateMs(0L);
    GlobalDiscoveryEntry entry2 = new GlobalDiscoveryEntry(entry1);
    entry2.setParticipantId(participantId2);

    Promise<Add1Deferred> promiseAdd1 = localCapabilitiesDirectory.add(entry1, true, gbids);
    Promise<Add1Deferred> promiseAdd2 = localCapabilitiesDirectory.add(entry2, true, gbids);
    checkPromiseSuccess(promiseAdd1, "add failed");
    checkPromiseSuccess(promiseAdd2, "add failed");

    // Mock return values of localDiscoveryEntryStore.touchDiscoveryEntries
    String[] participantIdsToTouch = new String[]{ participantId1, participantId2 };
    when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
                                                            anyLong())).thenReturn(participantIdsToTouch);

    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(TimeUnit.MILLISECONDS));
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .touch(ArgumentMatchers.<Callback<Void>> any(),
                                                 eq(participantIdsToTouch),
                                                 anyString());

    Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
    runnableCaptor.getValue().run();

    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // a single touch carrying both participant ids, addressed to the single gbid
    verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              eq(participantIdsToTouch),
                                                              eq(gbid));
}
/**
 * Verifies that a single participant id registered for multiple GBIDs results in
 * exactly one GCD touch call, addressed to the first GBID of the registration.
 */
@Test
public void touchCalledOnce_singleParticipantIdForMultipleGbids() throws InterruptedException {
    String participantId1 = "participantId1";
    String gbid1 = knownGbids[1];
    String gbid2 = knownGbids[2];
    String[] gbids = { gbid1, gbid2 };

    GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    entry1.getQos().setScope(ProviderScope.GLOBAL);
    entry1.setParticipantId(participantId1);
    // entry starts out stale so the touch has something to refresh
    entry1.setExpiryDateMs(0L);
    entry1.setLastSeenDateMs(0L);

    Promise<Add1Deferred> promiseAdd = localCapabilitiesDirectory.add(entry1, true, gbids);
    checkPromiseSuccess(promiseAdd, "add failed");

    // Mock return values of localDiscoveryEntryStore.touchDiscoveryEntries
    String[] participantIdsToTouch = new String[]{ participantId1 };
    when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
                                                            anyLong())).thenReturn(participantIdsToTouch);

    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(TimeUnit.MILLISECONDS));
    CountDownLatch cdl = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .touch(ArgumentMatchers.<Callback<Void>> any(),
                                                 eq(participantIdsToTouch),
                                                 anyString());

    Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
    runnableCaptor.getValue().run();

    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // exactly one touch, addressed to the first gbid only
    verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              eq(participantIdsToTouch),
                                                              eq(gbid1));
}
/**
 * Verifies that participant ids registered for different GBIDs are touched via
 * separate GCD touch calls: one call per GBID, each carrying only the participant ids
 * registered for that GBID.
 */
@Test
public void touchCalledTwice_twoParticipantIdsForDifferentGbids() throws InterruptedException {
    String participantId1 = "participantId1";
    String participantId2 = "participantId2";
    String gbid1 = knownGbids[1];
    String gbid2 = knownGbids[2];
    String[] gbids1 = { gbid1 };
    String[] gbids2 = { gbid2 };

    GlobalDiscoveryEntry entry1 = new GlobalDiscoveryEntry(globalDiscoveryEntry);
    entry1.getQos().setScope(ProviderScope.GLOBAL);
    entry1.setParticipantId(participantId1);
    // entries start out stale so the touch has something to refresh
    entry1.setExpiryDateMs(0L);
    entry1.setLastSeenDateMs(0L);
    GlobalDiscoveryEntry entry2 = new GlobalDiscoveryEntry(entry1);
    entry2.setParticipantId(participantId2);

    Promise<Add1Deferred> promiseAdd1 = localCapabilitiesDirectory.add(entry1, true, gbids1);
    Promise<Add1Deferred> promiseAdd2 = localCapabilitiesDirectory.add(entry2, true, gbids2);
    checkPromiseSuccess(promiseAdd1, "add failed");
    checkPromiseSuccess(promiseAdd2, "add failed");

    // Mock return values of localDiscoveryEntryStore.touchDiscoveryEntries
    String[] participantIdsToTouch = new String[]{ participantId1, participantId2 };
    when(localDiscoveryEntryStoreMock.touchDiscoveryEntries(anyLong(),
                                                            anyLong())).thenReturn(participantIdsToTouch);

    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(freshnessUpdateIntervalMs),
                                                                    eq(TimeUnit.MILLISECONDS));
    String[] expectedParticipantIds1 = new String[]{ participantId1 };
    String[] expectedParticipantIds2 = new String[]{ participantId2 };
    // latch counts 2: one touch call per gbid is expected
    CountDownLatch cdl = new CountDownLatch(2);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .touch(ArgumentMatchers.<Callback<Void>> any(),
                                                 ArgumentMatchers.<String[]> any(),
                                                 anyString());

    Thread.sleep(freshnessUpdateIntervalMs); // make sure that the initial delay has expired before starting the runnable
    runnableCaptor.getValue().run();

    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              eq(expectedParticipantIds1),
                                                              eq(gbid1));
    verify(globalCapabilitiesDirectoryClient, times(1)).touch(ArgumentMatchers.<Callback<Void>> any(),
                                                              eq(expectedParticipantIds2),
                                                              eq(gbid2));
}
/**
 * Verifies that removeStaleProvidersOfClusterController() calls removeStale() once per
 * known backend with a maxLastSeenDate that lies within toleranceMs of "now".
 */
@Test
public void removeStaleProvidersOfClusterController_invokesGcdClient() {
    // Test whether removeStale() of GlobalCapabilitiesDirectoryClient is called once for all known backends
    // and captured argument of maxLastSeenDateMs differs from current time less than threshold.
    final long currentDateMs = System.currentTimeMillis();
    ArgumentCaptor<Long> maxLastSeenDateCaptor = ArgumentCaptor.forClass(Long.class);
    final long toleranceMs = 200L;
    localCapabilitiesDirectory.removeStaleProvidersOfClusterController();
    ArgumentCaptor<String> gbidCaptor = ArgumentCaptor.forClass(String.class);
    verify(globalCapabilitiesDirectoryClient,
           times(knownGbids.length)).removeStale(ArgumentMatchers.<Callback<Void>> any(),
                                                 maxLastSeenDateCaptor.capture(),
                                                 gbidCaptor.capture());
    // maxLastSeenDate must lie in (currentDateMs - toleranceMs, currentDateMs]
    assertTrue(maxLastSeenDateCaptor.getValue() <= currentDateMs);
    assertTrue(currentDateMs - maxLastSeenDateCaptor.getValue() <= toleranceMs);
    // every known gbid must be addressed, in declaration order
    List<String> actualGbids = gbidCaptor.getAllValues();
    assertEquals(Arrays.asList(knownGbids), actualGbids);
}
// Verifies the retry behaviour of removeStaleProvidersOfClusterController():
// when the removeStale() callback reports failure, the call is repeated until
// it succeeds, so the GCD client is invoked (numberOfOnFailureCalls + 1) times
// per GBID.
@Test
public void removeStaleProvidersOfClusterController_callsItselfOnCallbackFailure() {
    // Test whether removeStaleProvidersOfClusterController() is calling itself n-times
    // when callback function is calling onFailure(exception) function.
    int numberOfOnFailureCalls = 2;
    JoynrRuntimeException exception = new JoynrRuntimeException("removeStale failed");
    for (String gbid : knownGbids) {
        doAnswer(new Answer<Future<Void>>() {
            // Invocation counter: fail the first numberOfOnFailureCalls
            // attempts, then report success.
            private int count = 0;

            @Override
            public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
                Future<Void> result = new Future<Void>();
                @SuppressWarnings("unchecked")
                Callback<Void> callback = (Callback<Void>) invocation.getArguments()[0];
                if (count++ == numberOfOnFailureCalls) {
                    callback.onSuccess(null);
                    result.onSuccess(null);
                    return result;
                }
                callback.onFailure(exception);
                result.onSuccess(null);
                return result;
            }
        }).when(globalCapabilitiesDirectoryClient)
          .removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), eq(gbid));
    }
    localCapabilitiesDirectory.removeStaleProvidersOfClusterController();
    int numberOfCalls = numberOfOnFailureCalls + 1; // one time success
    for (String gbid : knownGbids) {
        verify(globalCapabilitiesDirectoryClient,
               times(numberOfCalls)).removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), eq(gbid));
    }
}
// Verifies that a non-retryable failure stops the retry loop: when the
// callback fails with JoynrMessageNotSentException ("Address type not
// supported"), removeStale() is attempted exactly once per GBID.
@Test
public void removeStaleProvidersOfClusterController_calledOnceIfMessageNotSent() {
    // Test whether removeStale() of GlobalCapabiltiesDirectoryClient is called once when exception
    // in a gbid has a type JoynrMessageNotSentException and contains "Address type not supported" message
    JoynrRuntimeException exception = new JoynrMessageNotSentException("Address type not supported");
    doAnswer(new Answer<Future<Void>>() {
        @Override
        public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
            Future<Void> result = new Future<Void>();
            @SuppressWarnings("unchecked")
            Callback<Void> callback = (Callback<Void>) invocation.getArguments()[0];
            // Always report the non-retryable failure to the caller.
            callback.onFailure(exception);
            result.onSuccess(null);
            return result;
        }
    }).when(globalCapabilitiesDirectoryClient)
      .removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), anyString());
    localCapabilitiesDirectory.removeStaleProvidersOfClusterController();
    for (String gbid : knownGbids) {
        verify(globalCapabilitiesDirectoryClient, times(1)).removeStale(ArgumentMatchers.<Callback<Void>> any(),
                                                                        anyLong(),
                                                                        eq(gbid));
    }
}
// Verifies that retries stop when the maximum retry duration is exceeded:
// with a cluster-controller start date older than removeStaleMaxRetryMs, a
// permanently failing removeStale() is attempted only once per GBID.
@Test
public void removeStaleProvidersOfClusterController_noRetryIfRetryDurationExceeded() {
    final long removeStaleMaxRetryMs = 3600000;
    // Set a custom value of cluster controller start time to simulate timeout for removeStale retries
    final long ccStartUpDateMs = removeStaleMaxRetryMs + 1;
    try {
        // Inject the simulated start date via reflection.
        setFieldValue(localCapabilitiesDirectory, "ccStartUpDateInMs", ccStartUpDateMs);
    } catch (Exception e) {
        fail("Couldn't set start date of cluster controller in milliseconds.");
    }
    JoynrRuntimeException exception = new JoynrRuntimeException("removeStale failed");
    for (String gbid : knownGbids) {
        doAnswer(new Answer<Future<Void>>() {
            @Override
            public Future<Void> answer(InvocationOnMock invocation) throws Throwable {
                Future<Void> result = new Future<Void>();
                @SuppressWarnings("unchecked")
                Callback<Void> callback = (Callback<Void>) invocation.getArguments()[0];
                // Fail every attempt; the retry window is already expired.
                callback.onFailure(exception);
                result.onSuccess(null);
                return result;
            }
        }).when(globalCapabilitiesDirectoryClient)
          .removeStale(ArgumentMatchers.<Callback<Void>> any(), anyLong(), eq(gbid));
    }
    localCapabilitiesDirectory.removeStaleProvidersOfClusterController();
    for (String gbid : knownGbids) {
        // Exactly one attempt, carrying the simulated start date as
        // maxLastSeenDate.
        verify(globalCapabilitiesDirectoryClient, times(1)).removeStale(ArgumentMatchers.<Callback<Void>> any(),
                                                                        eq(ccStartUpDateMs),
                                                                        eq(gbid));
    }
}
// Verifies strict ordering of GCD client interactions: two global adds are
// forwarded in registration order, and the two subsequent removes are
// forwarded in removal order (entry2 first, then entry1).
@Test(timeout = TEST_TIMEOUT)
public void addAndRemoveAreCalledInOrder() throws InterruptedException {
    final String participantId1 = "participantId1";
    final String participantId2 = "participantId2";
    // Two globally scoped entries differing only in participantId.
    DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry1.getQos().setScope(ProviderScope.GLOBAL);
    discoveryEntry1.setParticipantId(participantId1);
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry2.setParticipantId(participantId2);
    GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
                                                                                                    globalAddress1);
    GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
                                                                                                    globalAddress1);
    final boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry1, awaitGlobalRegistration);
    Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
    checkPromiseSuccess(promiseAdd1, "add failed");
    checkPromiseSuccess(promiseAdd2, "add failed");
    // Adds must reach the GCD client in registration order; each remaining
    // TTL is captured and checked for plausibility.
    InOrder inOrder = inOrder(globalCapabilitiesDirectoryClient);
    ArgumentCaptor<Long> remainingTtlCapture = ArgumentCaptor.forClass(Long.class);
    inOrder.verify(globalCapabilitiesDirectoryClient)
           .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
                remainingTtlCapture.capture(),
                any(String[].class));
    checkRemainingTtl(remainingTtlCapture);
    inOrder.verify(globalCapabilitiesDirectoryClient)
           .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
                remainingTtlCapture.capture(),
                any(String[].class));
    checkRemainingTtl(remainingTtlCapture);
    // Stub the remove calls so the latch counts both completions.
    CountDownLatch cdl = new CountDownLatch(2);
    doAnswer(createAnswerWithSuccess(cdl)).when(globalCapabilitiesDirectoryClient)
                                          .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                  anyString(),
                                                  any(String[].class));
    // The local store must resolve both participantIds for remove() to proceed.
    when(localDiscoveryEntryStoreMock.lookup(discoveryEntry2.getParticipantId(),
                                             Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry2));
    when(localDiscoveryEntryStoreMock.lookup(discoveryEntry1.getParticipantId(),
                                             Long.MAX_VALUE)).thenReturn(Optional.of(discoveryEntry1));
    localCapabilitiesDirectory.remove(discoveryEntry2.getParticipantId());
    localCapabilitiesDirectory.remove(discoveryEntry1.getParticipantId());
    assertTrue(cdl.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // Removes must reach the GCD client in removal order (entry2 before entry1).
    inOrder.verify(globalCapabilitiesDirectoryClient)
           .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                   eq(participantId2),
                   any(String[].class));
    inOrder.verify(globalCapabilitiesDirectoryClient)
           .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                   eq(participantId1),
                   any(String[].class));
}
/**
 * Mockito matcher that accepts a {@link GlobalDiscoveryEntry} whose
 * participantId and address both equal those of the expected entry.
 * A null argument fails the surrounding test via {@code assertNotNull}.
 */
private static class GlobalDiscoveryEntryWithParticipantIdMatcher implements ArgumentMatcher<GlobalDiscoveryEntry> {
    private final GlobalDiscoveryEntry expected;

    private GlobalDiscoveryEntryWithParticipantIdMatcher(GlobalDiscoveryEntry expected) {
        this.expected = expected;
    }

    @Override
    public boolean matches(GlobalDiscoveryEntry argument) {
        assertNotNull(argument);
        // Bug fix: participantIds are Strings and must be compared with
        // equals() — the previous '==' only worked by interning coincidence.
        return java.util.Objects.equals(expected.getParticipantId(), argument.getParticipantId())
                && expected.getAddress().equals(argument.getAddress());
    }
}
/**
 * Overrides the private {@code defaultTtlAddAndRemove} field of
 * {@link LocalCapabilitiesDirectoryImpl} via reflection, letting tests run
 * with a shorter TTL than the production default.
 *
 * @param defaulTtlMs the TTL value (in milliseconds) to inject
 * @throws ReflectiveOperationException if the field cannot be accessed or set
 */
private void setNewDefaultTtlAddAndRemove(long defaulTtlMs) throws ReflectiveOperationException {
    final Field ttlField = LocalCapabilitiesDirectoryImpl.class.getDeclaredField("defaultTtlAddAndRemove");
    ttlField.setAccessible(true);
    ttlField.set(localCapabilitiesDirectory, defaulTtlMs);
}
// Verifies queued GCD action expiry: while the first add is still in flight,
// the second queued add's TTL (shortened to 1000 ms) runs out and its promise
// is rejected; the queued remove is still executed after the first add
// completes. The expired add must never reach the GCD client.
@Test(timeout = TEST_TIMEOUT)
public void testProcessingExpiredQueuedGcdActions() throws Exception {
    reset(globalCapabilitiesDirectoryClient);
    // defaultTtlAddAndRemove = 60000ms (MessagingQos.DEFAULT_TTL) is too long, we reduce it to 1000ms for the test
    setNewDefaultTtlAddAndRemove(1000);
    final String participantId1 = "participantId1";
    final String participantId2 = "participantId2";
    DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry1.getQos().setScope(ProviderScope.GLOBAL);
    discoveryEntry1.setParticipantId(participantId1);
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry1);
    discoveryEntry2.setParticipantId(participantId2);
    GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
                                                                                                    globalAddress1);
    GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
                                                                                                    globalAddress1);
    // The first add is delayed (1500 ms) past the reduced TTL so the queued
    // second add expires while waiting.
    final long delay = 1500;
    CountDownLatch cdlAddDelayStarted = new CountDownLatch(1);
    CountDownLatch cdlAddDone = new CountDownLatch(1);
    doAnswer(createAnswerWithDelayedSuccess(cdlAddDelayStarted,
                                            cdlAddDone,
                                            delay)).when(globalCapabilitiesDirectoryClient)
                                                   .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                        argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
                                                        anyLong(),
                                                        any(String[].class));
    CountDownLatch cdlRemove = new CountDownLatch(1);
    doAnswer(createAnswerWithSuccess(cdlRemove)).when(globalCapabilitiesDirectoryClient)
                                                .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                        eq(participantId1),
                                                        any(String[].class));
    // 3 actions. 2 lcd.add and 1 lcd.remove
    final Boolean awaitGlobalRegistration = true;
    Promise<DeferredVoid> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry1, awaitGlobalRegistration);
    Promise<DeferredVoid> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry2, awaitGlobalRegistration);
    localCapabilitiesDirectory.remove(discoveryEntry1.getParticipantId());
    assertTrue(cdlAddDelayStarted.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // The second add's promise must be rejected with the expiry error.
    JoynrRuntimeException expectedException = new JoynrRuntimeException("Failed to process global registration in time, please try again");
    checkPromiseException(promiseAdd2, new ProviderRuntimeException(expectedException.toString()));
    // second add failed before first add has finished, remove not yet executed
    assertEquals(1, cdlAddDone.getCount());
    assertEquals(1, cdlRemove.getCount());
    checkPromiseSuccess(promiseAdd1, "add failed");
    assertTrue(cdlAddDone.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    assertTrue(cdlRemove.await(DEFAULT_WAIT_TIME_MS, TimeUnit.MILLISECONDS));
    // The surviving actions reach the GCD client in order: add1 then remove1.
    InOrder inOrder = inOrder(globalCapabilitiesDirectoryClient);
    inOrder.verify(globalCapabilitiesDirectoryClient, times(1))
           .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
                anyLong(),
                any(String[].class));
    inOrder.verify(globalCapabilitiesDirectoryClient, times(1))
           .remove(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                   eq(participantId1),
                   any(String[].class));
    // The expired add for entry2 must never have been forwarded.
    verify(globalCapabilitiesDirectoryClient,
           times(0)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
                         anyLong(),
                         any(String[].class));
}
// Verifies the periodic re-add task: all global entries held in the local
// store are re-registered at the GCD client with the default TTL and their
// original GBIDs when the scheduled runnable fires.
@Test(timeout = TEST_TIMEOUT)
public void testReAddAllGlobalDiscoveryEntriesPeriodically() throws InterruptedException {
    final String participantId1 = "participantId1";
    final String participantId2 = "participantId2";
    DiscoveryEntry discoveryEntry1 = new DiscoveryEntry(discoveryEntry);
    discoveryEntry1.getQos().setScope(ProviderScope.GLOBAL);
    discoveryEntry1.setParticipantId(participantId1);
    DiscoveryEntry discoveryEntry2 = new DiscoveryEntry(discoveryEntry1);
    discoveryEntry2.setParticipantId(participantId2);
    GlobalDiscoveryEntry globalDiscoveryEntry1 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry1,
                                                                                                    globalAddress1);
    GlobalDiscoveryEntry globalDiscoveryEntry2 = CapabilityUtils.discoveryEntry2GlobalDiscoveryEntry(discoveryEntry2,
                                                                                                    globalAddress1);
    final boolean awaitGlobalRegistration = true;
    // Each entry is registered in a different backend.
    String[] gbids1 = new String[]{ knownGbids[0] };
    String[] expectedGbids1 = gbids1.clone();
    String[] gbids2 = new String[]{ knownGbids[1] };
    String[] expectedGbids2 = gbids2.clone();
    Promise<Add1Deferred> promiseAdd1 = localCapabilitiesDirectory.add(discoveryEntry1,
                                                                       awaitGlobalRegistration,
                                                                       gbids1);
    Promise<Add1Deferred> promiseAdd2 = localCapabilitiesDirectory.add(discoveryEntry2,
                                                                       awaitGlobalRegistration,
                                                                       gbids2);
    checkPromiseSuccess(promiseAdd1, "add failed");
    checkPromiseSuccess(promiseAdd2, "add failed");
    // Forget the initial adds so only the re-add invocations are counted below.
    reset(globalCapabilitiesDirectoryClient);
    CountDownLatch cdlReAdd = new CountDownLatch(2);
    doAnswer(createAnswerWithSuccess(cdlReAdd)).when(globalCapabilitiesDirectoryClient)
                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                    argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
                                                    anyLong(),
                                                    eq(gbids1));
    doAnswer(createAnswerWithSuccess(cdlReAdd)).when(globalCapabilitiesDirectoryClient)
                                               .add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                                                    argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
                                                    anyLong(),
                                                    eq(gbids2));
    // The local store reports both entries as global, so both are re-added.
    Set<DiscoveryEntry> globalEntries = new HashSet<>();
    globalEntries.add(discoveryEntry1);
    globalEntries.add(discoveryEntry2);
    when(localDiscoveryEntryStoreMock.getAllGlobalEntries()).thenReturn(globalEntries);
    verify(globalCapabilitiesDirectoryClient, times(0)).add(any(), any(), anyLong(), any());
    // The re-add task must have been scheduled with the expected fixed rate.
    verify(capabilitiesFreshnessUpdateExecutor).scheduleAtFixedRate(runnableCaptor.capture(),
                                                                    eq(RE_ADD_INTERVAL_DAYS),
                                                                    eq(RE_ADD_INTERVAL_DAYS),
                                                                    eq(TimeUnit.DAYS));
    // capture the runnable and execute it to schedule the re-add task
    Runnable runnable = runnableCaptor.getValue();
    runnable.run();
    assertTrue(cdlReAdd.await(defaultTtlAddAndRemove, TimeUnit.MILLISECONDS));
    // check whether add method has been called for 2 non expired entries
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry1)),
                         eq(defaultTtlAddAndRemove),
                         eq(expectedGbids1));
    verify(globalCapabilitiesDirectoryClient,
           times(1)).add(ArgumentMatchers.<CallbackWithModeledError<Void, DiscoveryError>> any(),
                         argThat(new GlobalDiscoveryEntryWithParticipantIdMatcher(globalDiscoveryEntry2)),
                         eq(defaultTtlAddAndRemove),
                         eq(expectedGbids2));
}
}
| [Java] Adapt existing tests to new provider registration behavior
| java/core/clustercontroller/src/test/java/io/joynr/capabilities/LocalCapabilitiesDirectoryTest.java | [Java] Adapt existing tests to new provider registration behavior |
|
Java | apache-2.0 | fcbbbe82203d2944bf9729689de1c1acc3d981bf | 0 | etnetera/jmeter,apache/jmeter,apache/jmeter,benbenw/jmeter,apache/jmeter,apache/jmeter,benbenw/jmeter,apache/jmeter,ham1/jmeter,etnetera/jmeter,benbenw/jmeter,etnetera/jmeter,ham1/jmeter,ham1/jmeter,ham1/jmeter,benbenw/jmeter,etnetera/jmeter,etnetera/jmeter,ham1/jmeter | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.util;
import java.awt.AWTEvent;
import java.awt.Component;
import java.awt.EventQueue;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.ItemEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.Serializable;
import javax.swing.AbstractAction;
import javax.swing.ActionMap;
import javax.swing.ButtonModel;
import javax.swing.Icon;
import javax.swing.JCheckBox;
import javax.swing.SwingUtilities;
import javax.swing.UIDefaults;
import javax.swing.UIManager;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.ActionMapUIResource;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.metal.MetalLookAndFeel;
// derived from: http://www.javaspecialists.eu/archive/Issue145.html
public final class TristateCheckBox extends JCheckBox {
private static final long serialVersionUID = 1L;
// Listener on model changes to maintain correct focusability
private final ChangeListener enableListener = new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
TristateCheckBox.this.setFocusable(
getModel().isEnabled());
}
};
/** Creates an unlabelled tristate checkbox starting in the DESELECTED state. */
public TristateCheckBox() {
    this(null, null, TristateState.DESELECTED);
}
/** Creates a tristate checkbox with the given label, starting DESELECTED. */
public TristateCheckBox(String text) {
    this(text, null, TristateState.DESELECTED);
}
/**
 * Creates a tristate checkbox with the given label and a two-state initial value.
 *
 * @param selected true for SELECTED, false for DESELECTED (never INDETERMINATE)
 */
public TristateCheckBox(String text, boolean selected) {
    this(text, null, selected ? TristateState.SELECTED : TristateState.DESELECTED);
}
/**
 * Creates a tristate checkbox.
 *
 * @param text    label text (may be null)
 * @param icon    checkbox icon (may be null for the look-and-feel default)
 * @param initial the initial tristate state
 */
public TristateCheckBox(String text, Icon icon, TristateState initial) {
    this(text, icon, initial, false);
}
// For testing only at present
// Designated constructor: installs the tristate model and replaces the
// default mouse and keyboard activation so every press cycles through the
// three states instead of toggling between two.
TristateCheckBox(String text, Icon icon, TristateState initial, boolean original) {
    super(text, icon);
    //Set default single model
    setModel(new TristateButtonModel(initial, this, original));
    // override action behaviour
    // Registered via super.addMouseListener because the public
    // addMouseListener() override of this class is an intentional no-op.
    super.addMouseListener(new MouseAdapter() {
        @Override
        public void mousePressed(MouseEvent e) {
            TristateCheckBox.this.iterateState();
        }
    });
    // Rebind the keyboard "pressed" action to the state-cycling logic and
    // remove the "released" action.
    ActionMap actions = new ActionMapUIResource();
    actions.put("pressed", new AbstractAction() {
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent e) {
            TristateCheckBox.this.iterateState();
        }
    });
    actions.put("released", null);
    SwingUtilities.replaceUIActionMap(this, actions);
}
// Next two methods implement new API by delegation to model
/** Puts the checkbox into the INDETERMINATE state. */
public void setIndeterminate() {
    getTristateModel().setIndeterminate();
}
/** @return true if the checkbox is currently in the INDETERMINATE state */
public boolean isIndeterminate() {
    return getTristateModel().isIndeterminate();
}
/** @return the current tristate state of the underlying model */
public TristateState getState() {
    return getTristateModel().getState();
}
//Overrides superclass method
/**
 * Installs the given model and, when it is a {@link TristateButtonModel},
 * registers {@code enableListener} so the checkbox's focusability tracks the
 * model's enabled state.
 */
@Override
public void setModel(ButtonModel newModel) {
    super.setModel(newModel);
    //Listen for enable changes
    if (model instanceof TristateButtonModel) {
        model.addChangeListener(enableListener);
    }
}
//Empty override of superclass method
// Intentionally a no-op: external mouse listeners are suppressed so that
// state changes only flow through the internal listener installed in the
// constructor (via super.addMouseListener), keeping the 3-state cycle intact.
@Override
public synchronized void addMouseListener(MouseListener l) {
}
// Mostly delegates to model
// Cycles the model to its next state and fires an ActionEvent carrying the
// modifiers of whatever AWT event triggered the change.
private void iterateState() {
    //Maybe do nothing at all?
    if (!getModel().isEnabled()) return;
    grabFocus();
    // Iterate state
    getTristateModel().iterateState();
    // Fire ActionEvent
    int modifiers = 0;
    AWTEvent currentEvent = EventQueue.getCurrentEvent();
    if (currentEvent instanceof InputEvent) {
        modifiers = ((InputEvent) currentEvent).getModifiers();
    } else if (currentEvent instanceof ActionEvent) {
        modifiers = ((ActionEvent) currentEvent).getModifiers();
    }
    fireActionPerformed(new ActionEvent(this,
            ActionEvent.ACTION_PERFORMED, getText(),
            System.currentTimeMillis(), modifiers));
}
//Convenience cast
/** @return the button model, downcast to {@link TristateButtonModel} */
public TristateButtonModel getTristateModel() {
    return (TristateButtonModel) super.getModel();
}
/**
 * Button model implementing the three-state cycle. Keeps the tristate state,
 * manages the superclass selected/armed/pressed flags, and swaps the owning
 * checkbox's icons to render the indeterminate look.
 */
private static class TristateButtonModel extends ToggleButtonModel {
    private static final long serialVersionUID = 1L;

    /** Current tristate state; never null. */
    private TristateState state = TristateState.DESELECTED;
    /** Owning checkbox whose icons are swapped for the indeterminate look (may be null in tests). */
    private final TristateCheckBox tristateCheckBox;
    /** Icon painted while the state is INDETERMINATE. */
    private final Icon icon;
    /** If true, render indeterminate via armed/pressed flags instead of icon swapping. */
    private final boolean original;

    public TristateButtonModel(TristateState initial,
            TristateCheckBox tristateCheckBox, boolean original) {
        // Assign collaborators first: setState() -> displayState() reads them.
        this.tristateCheckBox = tristateCheckBox;
        this.original = original;
        icon = new TristateCheckBoxIcon();
        // Bug fix: honour the requested initial state. Previously the
        // 'initial' parameter was ignored and the model always started
        // DESELECTED, so e.g. TristateCheckBox(text, true) began unchecked.
        setState(initial != null ? initial : TristateState.DESELECTED);
    }

    /** Puts the model into the INDETERMINATE state. */
    public void setIndeterminate() {
        setState(TristateState.INDETERMINATE);
    }

    public boolean isIndeterminate() {
        return state == TristateState.INDETERMINATE;
    }

    // Overrides of superclass methods
    @Override
    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);
        // Restore state display
        displayState();
    }

    @Override
    public void setSelected(boolean selected) {
        setState(selected ?
                TristateState.SELECTED : TristateState.DESELECTED);
    }

    // Empty overrides: armed/pressed are managed internally by displayState().
    @Override
    public void setArmed(boolean b) {
    }

    @Override
    public void setPressed(boolean b) {
    }

    /** Advances to the next state as defined by {@code TristateState.next()}. */
    void iterateState() {
        setState(state.next());
    }

    private void setState(TristateState state) {
        //Set internal state
        this.state = state;
        displayState();
        if (state == TristateState.INDETERMINATE && isEnabled()) {
            // force the events to fire
            // Send ChangeEvent
            fireStateChanged();
            // Send ItemEvent
            int indeterminate = 3;
            fireItemStateChanged(new ItemEvent(
                    this, ItemEvent.ITEM_STATE_CHANGED, this,
                    indeterminate));
        }
    }

    // Updates the superclass flags and the checkbox icons so the component
    // visually reflects the current tristate state.
    private void displayState() {
        super.setSelected(state != TristateState.DESELECTED);
        if (original) {
            super.setArmed(state == TristateState.INDETERMINATE);
        } else {
            if (state == TristateState.INDETERMINATE) {
                tristateCheckBox.setIcon(icon); // Needed for all but Nimbus
                tristateCheckBox.setSelectedIcon(icon); // Nimbus works - after a fashion - with this
                tristateCheckBox.setDisabledSelectedIcon(icon); // Nimbus works - after a fashion - with this
            } else { // reset
                if (tristateCheckBox != null) {
                    tristateCheckBox.setIcon(null);
                    tristateCheckBox.setSelectedIcon(null);
                    tristateCheckBox.setDisabledSelectedIcon(null); // Nimbus works - after a fashion - with this
                }
            }
        }
        super.setPressed(state == TristateState.INDETERMINATE);
    }

    public TristateState getState() {
        return state;
    }
}
// derived from: http://www.coderanch.com/t/342563/GUI/java/TriState-CheckBox
/**
 * Icon that paints a Metal-style checkbox border containing a horizontal bar,
 * used to render the INDETERMINATE state. Sized to match the current
 * look-and-feel's standard checkbox icon.
 */
private static class TristateCheckBoxIcon implements Icon, UIResource, Serializable {
    private static final long serialVersionUID = 290L;

    private final int iconHeight;
    private final int iconWidth;

    public TristateCheckBoxIcon() {
        // Assume that the UI has not changed since the checkbox was created;
        // size this icon like the current LAF's checkbox icon.
        UIDefaults defaults = UIManager.getLookAndFeelDefaults();
        final Icon icon = (Icon) defaults.get("CheckBox.icon");
        iconHeight = icon.getIconHeight();
        iconWidth = icon.getIconWidth();
    }

    @Override
    public void paintIcon(Component c, Graphics g, int x, int y) {
        JCheckBox cb = (JCheckBox) c;
        ButtonModel model = cb.getModel();
        // TODO fix up for Nimbus LAF
        if (model.isEnabled()) {
            if (model.isPressed() && model.isArmed()) {
                g.setColor(MetalLookAndFeel.getControlShadow());
                g.fillRect(x, y, iconWidth - 1, iconHeight - 1);
                drawPressed3DBorder(g, x, y, iconWidth, iconHeight);
            } else {
                drawFlush3DBorder(g, x, y, iconWidth, iconHeight);
            }
            g.setColor(MetalLookAndFeel.getControlInfo());
        } else {
            // Disabled: just an outline in the shadow colour.
            g.setColor(MetalLookAndFeel.getControlShadow());
            g.drawRect(x, y, iconWidth - 1, iconHeight - 1);
        }
        drawLine(g, x, y);
    }

    // Draws the horizontal indeterminate bar, two pixels thick, vertically centred.
    private void drawLine(Graphics g, int x, int y) {
        final int left = x + 2;
        final int right = x + (iconWidth - 4);
        int height = y + iconHeight / 2;
        g.drawLine(left, height, right, height);
        g.drawLine(left, height - 1, right, height - 1);
    }

    // Metal-style raised border.
    private void drawFlush3DBorder(Graphics g, int x, int y, int w, int h) {
        g.translate(x, y);
        g.setColor(MetalLookAndFeel.getControlDarkShadow());
        g.drawRect(0, 0, w - 2, h - 2);
        g.setColor(MetalLookAndFeel.getControlHighlight());
        g.drawRect(1, 1, w - 2, h - 2);
        g.setColor(MetalLookAndFeel.getControl());
        g.drawLine(0, h - 1, 1, h - 2);
        g.drawLine(w - 1, 0, w - 2, 1);
        g.translate(-x, -y);
    }

    // Metal-style sunken border (flush border plus inner shadow lines).
    private void drawPressed3DBorder(Graphics g, int x, int y, int w, int h) {
        g.translate(x, y);
        drawFlush3DBorder(g, 0, 0, w, h);
        g.setColor(MetalLookAndFeel.getControlShadow());
        g.drawLine(1, 1, 1, h - 2);
        g.drawLine(1, 1, w - 2, 1);
        g.translate(-x, -y);
    }

    @Override
    public int getIconWidth() {
        return iconWidth;
    }

    @Override
    public int getIconHeight() {
        return iconHeight;
    }
}
} | src/core/org/apache/jmeter/gui/util/TristateCheckBox.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.util;
import java.awt.AWTEvent;
import java.awt.Component;
import java.awt.EventQueue;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.ItemEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.Serializable;
import javax.swing.AbstractAction;
import javax.swing.ActionMap;
import javax.swing.ButtonModel;
import javax.swing.Icon;
import javax.swing.JCheckBox;
import javax.swing.SwingUtilities;
import javax.swing.UIDefaults;
import javax.swing.UIManager;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.ActionMapUIResource;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.metal.MetalLookAndFeel;
// derived from: http://www.javaspecialists.eu/archive/Issue145.html
public final class TristateCheckBox extends JCheckBox {
private static final long serialVersionUID = 1L;
// Listener on model changes to maintain correct focusability
private final ChangeListener enableListener = new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
TristateCheckBox.this.setFocusable(
getModel().isEnabled());
}
};
public TristateCheckBox(String text) {
this(text, null, TristateState.DESELECTED);
}
// For testing only at present
TristateCheckBox(String text, boolean original) {
this(text, null, TristateState.DESELECTED, original);
}
public TristateCheckBox(String text, Icon icon, TristateState initial) {
this(text, icon, initial, false);
}
// For testing only at present
TristateCheckBox(String text, Icon icon, TristateState initial, boolean original) {
super(text, icon);
//Set default single model
setModel(new TristateButtonModel(initial, this, original));
// override action behaviour
super.addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
TristateCheckBox.this.iterateState();
}
});
ActionMap actions = new ActionMapUIResource();
actions.put("pressed", new AbstractAction() {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
TristateCheckBox.this.iterateState();
}
});
actions.put("released", null);
SwingUtilities.replaceUIActionMap(this, actions);
}
// Next two methods implement new API by delegation to model
public void setIndeterminate() {
getTristateModel().setIndeterminate();
}
public boolean isIndeterminate() {
return getTristateModel().isIndeterminate();
}
public TristateState getState() {
return getTristateModel().getState();
}
//Overrides superclass method
@Override
public void setModel(ButtonModel newModel) {
super.setModel(newModel);
//Listen for enable changes
if (model instanceof TristateButtonModel)
model.addChangeListener(enableListener);
}
//Empty override of superclass method
@Override
public synchronized void addMouseListener(MouseListener l) {
}
// Mostly delegates to model
private void iterateState() {
//Maybe do nothing at all?
if (!getModel().isEnabled()) return;
grabFocus();
// Iterate state
getTristateModel().iterateState();
// Fire ActionEvent
int modifiers = 0;
AWTEvent currentEvent = EventQueue.getCurrentEvent();
if (currentEvent instanceof InputEvent) {
modifiers = ((InputEvent) currentEvent).getModifiers();
} else if (currentEvent instanceof ActionEvent) {
modifiers = ((ActionEvent) currentEvent).getModifiers();
}
fireActionPerformed(new ActionEvent(this,
ActionEvent.ACTION_PERFORMED, getText(),
System.currentTimeMillis(), modifiers));
}
//Convenience cast
public TristateButtonModel getTristateModel() {
return (TristateButtonModel) super.getModel();
}
private static class TristateButtonModel extends ToggleButtonModel {
private static final long serialVersionUID = 1L;
private TristateState state = TristateState.DESELECTED;
private final TristateCheckBox tristateCheckBox;
private final Icon icon;
private final boolean original;
public TristateButtonModel(TristateState initial,
TristateCheckBox tristateCheckBox, boolean original) {
setState(TristateState.DESELECTED);
this.tristateCheckBox = tristateCheckBox;
icon = new TristateCheckBoxIcon();
this.original = original;
}
public void setIndeterminate() {
setState(TristateState.INDETERMINATE);
}
public boolean isIndeterminate() {
return state == TristateState.INDETERMINATE;
}
// Overrides of superclass methods
@Override
public void setEnabled(boolean enabled) {
super.setEnabled(enabled);
// Restore state display
displayState();
}
@Override
public void setSelected(boolean selected) {
setState(selected ?
TristateState.SELECTED : TristateState.DESELECTED);
}
// Empty overrides of superclass methods
@Override
public void setArmed(boolean b) {
}
@Override
public void setPressed(boolean b) {
}
void iterateState() {
setState(state.next());
}
private void setState(TristateState state) {
//Set internal state
this.state = state;
displayState();
if (state == TristateState.INDETERMINATE && isEnabled()) {
// force the events to fire
// Send ChangeEvent
fireStateChanged();
// Send ItemEvent
int indeterminate = 3;
fireItemStateChanged(new ItemEvent(
this, ItemEvent.ITEM_STATE_CHANGED, this,
indeterminate));
}
}
private void displayState() {
super.setSelected(state != TristateState.DESELECTED);
if (original) {
super.setArmed(state == TristateState.INDETERMINATE);
} else {
if (state == TristateState.INDETERMINATE) {
tristateCheckBox.setIcon(icon); // Needed for all but Nimbus
tristateCheckBox.setSelectedIcon(icon); // Nimbus works - after a fashion - with this
tristateCheckBox.setDisabledSelectedIcon(icon); // Nimbus works - after a fashion - with this
} else { // reset
if (tristateCheckBox!= null){
tristateCheckBox.setIcon(null);
tristateCheckBox.setSelectedIcon(null);
tristateCheckBox.setDisabledSelectedIcon(null); // Nimbus works - after a fashion - with this
}
}
}
super.setPressed(state == TristateState.INDETERMINATE);
}
public TristateState getState() {
return state;
}
}
// derived from: http://www.coderanch.com/t/342563/GUI/java/TriState-CheckBox
/**
 * Icon used to render the indeterminate (third) state of the tri-state check
 * box: a Metal-style box containing a horizontal bar instead of a check mark.
 * The icon dimensions are copied from the current look-and-feel's standard
 * {@code CheckBox.icon} at construction time.
 */
private static class TristateCheckBoxIcon implements Icon, UIResource, Serializable {
    private static final long serialVersionUID = 290L;

    // Dimensions of the LAF's default check box icon, captured at construction.
    private final int iconHeight;
    private final int iconWidth;

    public TristateCheckBoxIcon() {
        // Assume that the UI has not changed since the checkbox was created
        UIDefaults defaults = UIManager.getLookAndFeelDefaults();
        final Icon icon = (Icon) defaults.get("CheckBox.icon");
        iconHeight = icon.getIconHeight();
        iconWidth = icon.getIconWidth();
    }

    /**
     * Paints the indeterminate-state box: a Metal-style 3D border (pressed or
     * flush depending on the button model) with a horizontal bar in the middle.
     */
    @Override
    public void paintIcon(Component c, Graphics g, int x, int y) {
        JCheckBox cb = (JCheckBox) c;
        ButtonModel model = cb.getModel();

        // TODO fix up for Nimbus LAF
        if (model.isEnabled()) {
            if (model.isPressed() && model.isArmed()) {
                // pressed look: shadow fill plus sunken border
                g.setColor(MetalLookAndFeel.getControlShadow());
                g.fillRect(x, y, iconWidth - 1, iconHeight - 1);
                drawPressed3DBorder(g, x, y, iconWidth, iconHeight);
            } else {
                drawFlush3DBorder(g, x, y, iconWidth, iconHeight);
            }
            g.setColor(MetalLookAndFeel.getControlInfo());
        } else {
            // disabled: just a shadow-colored outline
            g.setColor(MetalLookAndFeel.getControlShadow());
            g.drawRect(x, y, iconWidth - 1, iconHeight - 1);
        }
        // the horizontal bar marks the indeterminate state
        drawLine(g, x, y);
//        drawCross(g, x, y);
    }// paintIcon

//    private void drawCross(Graphics g, int x, int y) {
//        g.drawLine(x + (iconWidth - 4), y + 2, x + 3, y + (iconHeight - 5));
//        g.drawLine(x + (iconWidth - 4), y + 3, x + 3, y + (iconHeight - 4));
//        g.drawLine(x + 3, y + 2, x + (iconWidth - 4), y + (iconHeight - 5));
//        g.drawLine(x + 3, y + 3, x + (iconWidth - 4), y + (iconHeight - 4));
//    }

    // Draws a two-pixel-thick horizontal bar centered vertically in the icon.
    private void drawLine(Graphics g, int x, int y) {
        final int left = x + 2, right = x + (iconWidth - 4);
        int height = y + iconHeight/2;
        g.drawLine(left, height, right, height);
        g.drawLine(left, height - 1, right, height - 1);
    }

    // Metal-style raised (flush) border for the normal, unpressed icon box.
    private void drawFlush3DBorder(Graphics g, int x, int y, int w, int h) {
        g.translate(x, y);
        g.setColor(MetalLookAndFeel.getControlDarkShadow());
        g.drawRect(0, 0, w - 2, h - 2);
        g.setColor(MetalLookAndFeel.getControlHighlight());
        g.drawRect(1, 1, w - 2, h - 2);
        g.setColor(MetalLookAndFeel.getControl());
        g.drawLine(0, h - 1, 1, h - 2);
        g.drawLine(w - 1, 0, w - 2, 1);
        g.translate(-x, -y);
    }

    // Metal-style sunken border used while the icon box is pressed.
    private void drawPressed3DBorder(Graphics g, int x, int y, int w, int h) {
        g.translate(x, y);
        drawFlush3DBorder(g, 0, 0, w, h);
        g.setColor(MetalLookAndFeel.getControlShadow());
        g.drawLine(1, 1, 1, h - 2);
        g.drawLine(1, 1, w - 2, 1);
        g.translate(-x, -y);
    }

    @Override
    public int getIconWidth() {
        return iconWidth;
    }

    @Override
    public int getIconHeight() {
        return iconHeight;
    }
}
} | Add extra ctors for more compatibility with JCheckBox
git-svn-id: https://svn.apache.org/repos/asf/jmeter/trunk@1418390 13f79535-47bb-0310-9956-ffa450edef68
Former-commit-id: d4a4d08d4cee4b1905a44fb2f1b2ac8f3168956c | src/core/org/apache/jmeter/gui/util/TristateCheckBox.java | Add extra ctors for more compatibility with JCheckBox |
|
Java | apache-2.0 | ae08d0362dd2eeded5532f0992b7e028eb929c47 | 0 | ricepanda/rice,ricepanda/rice,ricepanda/rice,ricepanda/rice | /**
* Copyright 2005-2013 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.service.impl;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.StopWatch;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.config.ConfigurationException;
import org.kuali.rice.core.api.config.property.ConfigurationService;
import org.kuali.rice.core.api.datetime.DateTimeService;
import org.kuali.rice.core.api.util.RiceKeyConstants;
import org.kuali.rice.core.framework.persistence.jta.TransactionalNoValidationExceptionRollback;
import org.kuali.rice.kew.api.WorkflowDocument;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.kim.api.identity.Person;
import org.kuali.rice.kim.api.identity.PersonService;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import org.kuali.rice.krad.UserSession;
import org.kuali.rice.krad.UserSessionUtils;
import org.kuali.rice.krad.bo.AdHocRoutePerson;
import org.kuali.rice.krad.bo.AdHocRouteRecipient;
import org.kuali.rice.krad.bo.AdHocRouteWorkgroup;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.bo.DocumentHeader;
import org.kuali.rice.krad.bo.Note;
import org.kuali.rice.krad.bo.PersistableBusinessObject;
import org.kuali.rice.krad.datadictionary.exception.UnknownDocumentTypeException;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.document.DocumentAuthorizer;
import org.kuali.rice.krad.document.DocumentPresentationController;
import org.kuali.rice.krad.exception.DocumentAuthorizationException;
import org.kuali.rice.krad.exception.ValidationException;
import org.kuali.rice.krad.maintenance.Maintainable;
import org.kuali.rice.krad.maintenance.MaintenanceDocument;
import org.kuali.rice.krad.maintenance.MaintenanceDocumentBase;
import org.kuali.rice.krad.rules.rule.event.ApproveDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.BlanketApproveDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.CompleteDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.KualiDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.RouteDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.SaveDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.SaveEvent;
import org.kuali.rice.krad.service.DataDictionaryService;
import org.kuali.rice.krad.service.DocumentAdHocService;
import org.kuali.rice.krad.service.DocumentDictionaryService;
import org.kuali.rice.krad.service.DocumentHeaderService;
import org.kuali.rice.krad.service.DocumentService;
import org.kuali.rice.krad.service.KRADServiceLocator;
import org.kuali.rice.krad.service.KRADServiceLocatorWeb;
import org.kuali.rice.krad.service.LegacyDataAdapter;
import org.kuali.rice.krad.service.NoteService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.KRADUtils;
import org.kuali.rice.krad.util.NoteType;
import org.kuali.rice.krad.workflow.service.WorkflowDocumentService;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.dao.OptimisticLockingFailureException;
/**
* Service implementation for the Document structure. It contains all of the document level type of
* processing and calling back into documents for various centralization of functionality. This is the default,
* Kuali delivered implementation which utilizes Workflow.
*
* @author Kuali Rice Team ([email protected])
*/
@TransactionalNoValidationExceptionRollback
public class DocumentServiceImpl implements DocumentService {
private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(DocumentServiceImpl.class);
protected DateTimeService dateTimeService;
protected NoteService noteService;
protected WorkflowDocumentService workflowDocumentService;
protected LegacyDataAdapter legacyDataAdapter;
protected DataDictionaryService dataDictionaryService;
protected DocumentDictionaryService documentDictionaryService;
protected PersonService personService;
protected ConfigurationService kualiConfigurationService;
protected DocumentHeaderService documentHeaderService;
protected DocumentAdHocService documentAdHocService;
/**
 * Saves the document using the default {@link SaveDocumentEvent} rule event.
 *
 * @see org.kuali.rice.krad.service.DocumentService#saveDocument(org.kuali.rice.krad.document.Document)
 */
@Override
public Document saveDocument(Document document) throws WorkflowException, ValidationException {
    return saveDocument(document, SaveDocumentEvent.class);
}
/**
 * Validates the document with the rule event generated from {@code kualiDocumentEventClass},
 * persists it together with its ad hoc routing recipients, saves the workflow document and
 * caches it on the user session.
 *
 * @param document the document to save; must be non-null and have a document number
 * @param kualiDocumentEventClass the rule event class to validate with; must implement {@link SaveEvent}
 * @return the persisted document
 * @throws IllegalArgumentException if {@code kualiDocumentEventClass} is null
 * @throws ConfigurationException if the event class does not implement {@link SaveEvent}
 */
@Override
public Document saveDocument(Document document,
        Class<? extends KualiDocumentEvent> kualiDocumentEventClass) throws WorkflowException, ValidationException {
    checkForNulls(document);
    if (kualiDocumentEventClass == null) {
        throw new IllegalArgumentException("invalid (null) kualiDocumentEventClass");
    }
    // if event is not an instance of a SaveDocumentEvent or a SaveOnlyDocumentEvent
    if (!SaveEvent.class.isAssignableFrom(kualiDocumentEventClass)) {
        throw new ConfigurationException("The KualiDocumentEvent class '" + kualiDocumentEventClass.getName() +
                "' does not implement the class '" + SaveEvent.class.getName() + "'");
    }
    // if (!getDocumentActionFlags(document).getCanSave()) {
    //     throw buildAuthorizationException("save", document);
    // }
    document.prepareForSave();
    // validate + persist in one transaction, also re-saving ad hoc routing recipients
    Document savedDocument = validateAndPersistDocumentAndSaveAdHocRoutingRecipients(document,
            generateKualiDocumentEvent(document, kualiDocumentEventClass));
    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService().save(savedDocument.getDocumentHeader().getWorkflowDocument(), null);
    // cache the workflow document on the current user session for later retrieval
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());

    return savedDocument;
}
/**
 * Instantiates a {@link KualiDocumentEvent} of the given class for the given document by
 * locating a public constructor that accepts a {@link Document} parameter. All other
 * constructor parameters are passed as null.
 *
 * @param document the document the rule event is generated for
 * @param eventClass the concrete event class to instantiate
 * @return the constructed event
 * @throws RuntimeException if no public constructor accepts a Document parameter
 * @throws ConfigurationException if the matching constructor cannot be invoked
 */
private KualiDocumentEvent generateKualiDocumentEvent(Document document,
        Class<? extends KualiDocumentEvent> eventClass) throws ConfigurationException {
    String potentialErrorMessage =
            "Found error trying to generate Kuali Document Event using event class '" + eventClass.getName() +
                    "' for document " + document.getDocumentNumber();

    try {
        Constructor<?> usableConstructor = null;
        List<Object> paramList = null;

        for (Constructor<?> currentConstructor : eventClass.getConstructors()) {
            // Build the argument list for this candidate constructor from scratch so that
            // arguments from previously rejected constructors cannot leak in. (The old
            // implementation reused a single list across candidates, which produced a
            // wrong-arity argument array for newInstance whenever the first constructor
            // examined did not take a Document.)
            List<Object> candidateParams = new ArrayList<Object>();
            boolean takesDocument = false;
            for (Class<?> parameterClass : currentConstructor.getParameterTypes()) {
                if (Document.class.isAssignableFrom(parameterClass)) {
                    takesDocument = true;
                    candidateParams.add(document);
                } else {
                    // non-Document parameters cannot be derived here; pass null
                    candidateParams.add(null);
                }
            }
            if (takesDocument) {
                usableConstructor = currentConstructor;
                paramList = candidateParams;
                break;
            }
        }

        if (usableConstructor == null) {
            throw new RuntimeException("Cannot find a constructor for class '" + eventClass.getName() +
                    "' that takes in a document parameter");
        }
        return (KualiDocumentEvent) usableConstructor.newInstance(paramList.toArray());
    } catch (SecurityException e) {
        throw new ConfigurationException(potentialErrorMessage, e);
    } catch (IllegalArgumentException e) {
        throw new ConfigurationException(potentialErrorMessage, e);
    } catch (InstantiationException e) {
        throw new ConfigurationException(potentialErrorMessage, e);
    } catch (IllegalAccessException e) {
        throw new ConfigurationException(potentialErrorMessage, e);
    } catch (InvocationTargetException e) {
        throw new ConfigurationException(potentialErrorMessage, e);
    }
}
/**
 * Validates and persists the document, then routes it through workflow with the
 * given annotation and ad hoc recipients.
 *
 * @param document the document to route; must be non-null and have a document number
 * @param annotation the routing annotation recorded with the workflow action (may be null)
 * @param adHocRecipients ad hoc persons/groups to send requests to during routing
 * @return the persisted, routed document
 * @see org.kuali.rice.krad.service.DocumentService#routeDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String, java.util.List)
 */
@Override
public Document routeDocument(Document document, String annotation,
        List<AdHocRouteRecipient> adHocRecipients) throws ValidationException, WorkflowException {
    checkForNulls(document);
    //if (!getDocumentActionFlags(document).getCanRoute()) {
    //    throw buildAuthorizationException("route", document);
    //}
    document.prepareForSave();
    // run the route rule event and persist before handing off to workflow
    Document savedDocument = validateAndPersistDocument(document, new RouteDocumentEvent(document));
    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService()
            .route(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());
    // ad hoc requests have been handed to workflow; clear them from the document
    removeAdHocPersonsAndWorkgroups(savedDocument);
    return savedDocument;
}
/**
 * Validates and persists the document, then performs the workflow approve action
 * with the given annotation and ad hoc recipients.
 *
 * @param document the document to approve; must be non-null and have a document number
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @param adHocRecipients ad hoc persons/groups to send requests to
 * @return the persisted, approved document
 * @see org.kuali.rice.krad.service.DocumentService#approveDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String, java.util.List)
 */
@Override
public Document approveDocument(Document document, String annotation,
        List<AdHocRouteRecipient> adHocRecipients) throws ValidationException, WorkflowException {
    checkForNulls(document);
    //if (!getDocumentActionFlags(document).getCanApprove()) {
    //    throw buildAuthorizationException("approve", document);
    //}
    document.prepareForSave();
    // run the approve rule event and persist before handing off to workflow
    Document savedDocument = validateAndPersistDocument(document, new ApproveDocumentEvent(document));
    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService()
            .approve(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(savedDocument);
    return savedDocument;
}
/**
 * Saves the document and performs the workflow super user approve action.
 *
 * @param document the document to super user approve
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @return the saved, approved document
 * @see org.kuali.rice.krad.service.DocumentService#superUserApproveDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String)
 */
@Override
public Document superUserApproveDocument(Document document, String annotation) throws WorkflowException {
    Document savedDocument = getLegacyDataAdapter().saveDocument(document);
    // Need to preserve the workflow document header, which just got left behind
    savedDocument.getDocumentHeader().setWorkflowDocument(document.getDocumentHeader().getWorkflowDocument());
    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService().superUserApprove(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(savedDocument);
    return savedDocument;
}
/**
 * Saves the document and performs the workflow super user cancel action.
 *
 * @param document the document to super user cancel
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @return the saved, cancelled document
 * @see org.kuali.rice.krad.service.DocumentService#superUserCancelDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String)
 */
@Override
public Document superUserCancelDocument(Document document, String annotation) throws WorkflowException {
    Document savedDocument = getLegacyDataAdapter().saveDocument(document);
    // Need to preserve the workflow document header, which just got left behind
    savedDocument.getDocumentHeader().setWorkflowDocument(document.getDocumentHeader().getWorkflowDocument());
    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService().superUserCancel(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(savedDocument);
    return savedDocument;
}
/**
 * Saves the document and then performs the workflow super user disapprove action
 * via {@link #superUserDisapproveDocumentWithoutSaving(Document, String)}.
 *
 * @param document the document to super user disapprove
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @return the saved, disapproved document
 * @see org.kuali.rice.krad.service.DocumentService#superUserDisapproveDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String)
 */
@Override
public Document superUserDisapproveDocument(Document document, String annotation) throws WorkflowException {
    Document savedDocument = getLegacyDataAdapter().saveDocument(document);
    // Need to preserve the workflow document header, which just got left behind
    savedDocument.getDocumentHeader().setWorkflowDocument(document.getDocumentHeader().getWorkflowDocument());
    return superUserDisapproveDocumentWithoutSaving(savedDocument, annotation);
}
/**
 * Performs the workflow super user disapprove action on the document without
 * persisting it first.
 *
 * @param document the document to super user disapprove
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @return the disapproved document
 * @see org.kuali.rice.krad.service.DocumentService#superUserDisapproveDocumentWithoutSaving(org.kuali.rice.krad.document.Document,
 *      java.lang.String)
 */
@Override
public Document superUserDisapproveDocumentWithoutSaving(Document document, String annotation) throws WorkflowException {
    prepareWorkflowDocument(document);
    getWorkflowDocumentService()
            .superUserDisapprove(document.getDocumentHeader().getWorkflowDocument(), annotation);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            document.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(document);
    return document;
}
/**
 * Disapproves the document: records the annotation as a note (re-linked to the
 * document header for business-object-noted documents so the note remains visible
 * after disapproval), then performs the workflow disapprove action.
 *
 * @param document the document to disapprove; must be non-null and have a document number
 * @param annotation the disapproval reason, stored both as the note text and the workflow annotation
 * @return the disapproved document
 * @see org.kuali.rice.krad.service.DocumentService#disapproveDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String)
 */
@Override
public Document disapproveDocument(Document document, String annotation) throws Exception {
    checkForNulls(document);

    Note note = createNoteFromDocument(document, annotation);
    //if note type is BO, override and link disapprove notes to Doc Header
    if (document.getNoteType().equals(NoteType.BUSINESS_OBJECT)) {
        note.setNoteTypeCode(NoteType.DOCUMENT_HEADER.getCode());
        note.setRemoteObjectIdentifier(document.getDocumentHeader().getObjectId());
    }
    document.addNote(note);

    //SAVE THE NOTE
    //Note: This save logic is replicated here and in KualiDocumentAction, when to save (based on doc state) should be moved
    //      into a doc service method
    getNoteService().save(note);

    prepareWorkflowDocument(document);
    getWorkflowDocumentService().disapprove(document.getDocumentHeader().getWorkflowDocument(), annotation);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            document.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(document);
    return document;
}
/**
 * Cancels the document via workflow. For maintenance documents, the old and new
 * maintainable data objects are refreshed first (presumably to drop unsaved state
 * from the cancelled document — NOTE(review): confirm intent; only applies when
 * the data objects are BusinessObjects).
 *
 * @param document the document to cancel; must be non-null and have a document number
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @return the cancelled document
 * @see org.kuali.rice.krad.service.DocumentService#cancelDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String)
 */
@Override
public Document cancelDocument(Document document, String annotation) throws WorkflowException {
    checkForNulls(document);
    //if (!getDocumentActionFlags(document).getCanCancel()) {
    //    throw buildAuthorizationException("cancel", document);
    //}
    if (document instanceof MaintenanceDocument) {
        MaintenanceDocument maintDoc = ((MaintenanceDocument) document);
        if (maintDoc.getOldMaintainableObject() != null &&
                (maintDoc.getOldMaintainableObject().getDataObject() instanceof BusinessObject)) {
            ((BusinessObject) maintDoc.getOldMaintainableObject().getDataObject()).refresh();
        }

        if (maintDoc.getNewMaintainableObject().getDataObject() instanceof BusinessObject) {
            ((BusinessObject) maintDoc.getNewMaintainableObject().getDataObject()).refresh();
        }
    }
    prepareWorkflowDocument(document);
    getWorkflowDocumentService().cancel(document.getDocumentHeader().getWorkflowDocument(), annotation);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            document.getDocumentHeader().getWorkflowDocument());

    removeAdHocPersonsAndWorkgroups(document);
    return document;
}
/**
 * Recalls the document from workflow, recording the annotation as a note on the
 * document before performing the recall action.
 *
 * @param document the document to recall; must be non-null and have a document number
 * @param annotation the recall reason, stored both as a note and the workflow annotation
 * @param cancel whether the recalled document should also be cancelled
 * @return the recalled document
 * @see org.kuali.rice.krad.service.DocumentService#recallDocument
 */
@Override
public Document recallDocument(Document document, String annotation, boolean cancel) throws WorkflowException {
    checkForNulls(document);

    Note note = createNoteFromDocument(document, annotation);
    document.addNote(note);
    getNoteService().save(note);

    prepareWorkflowDocument(document);
    getWorkflowDocumentService().recall(document.getDocumentHeader().getWorkflowDocument(), annotation, cancel);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            document.getDocumentHeader().getWorkflowDocument());

    removeAdHocPersonsAndWorkgroups(document);
    return document;
}
/**
 * Performs the workflow acknowledge action on the document with the given
 * annotation and ad hoc recipients. The document is not persisted here.
 *
 * @param document the document to acknowledge; must be non-null and have a document number
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @param adHocRecipients ad hoc persons/groups to send requests to
 * @return the acknowledged document
 * @see org.kuali.rice.krad.service.DocumentService#acknowledgeDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String, java.util.List)
 */
@Override
public Document acknowledgeDocument(Document document, String annotation,
        List<AdHocRouteRecipient> adHocRecipients) throws WorkflowException {
    checkForNulls(document);
    //if (!getDocumentActionFlags(document).getCanAcknowledge()) {
    //    throw buildAuthorizationException("acknowledge", document);
    //}
    prepareWorkflowDocument(document);
    getWorkflowDocumentService()
            .acknowledge(document.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            document.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(document);
    return document;
}
/**
 * Validates and persists the document, then performs the workflow blanket approve
 * action (approving through all remaining route nodes) with the given annotation
 * and ad hoc recipients.
 *
 * @param document the document to blanket approve; must be non-null and have a document number
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @param adHocRecipients ad hoc persons/groups to send requests to
 * @return the persisted, blanket-approved document
 * @see org.kuali.rice.krad.service.DocumentService#blanketApproveDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String, java.util.List)
 */
@Override
public Document blanketApproveDocument(Document document, String annotation,
        List<AdHocRouteRecipient> adHocRecipients) throws ValidationException, WorkflowException {
    checkForNulls(document);
    //if (!getDocumentActionFlags(document).getCanBlanketApprove()) {
    //    throw buildAuthorizationException("blanket approve", document);
    //}
    document.prepareForSave();
    // run the blanket approve rule event and persist before handing off to workflow
    Document savedDocument = validateAndPersistDocument(document, new BlanketApproveDocumentEvent(document));
    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService()
            .blanketApprove(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(savedDocument);
    return savedDocument;
}
/**
 * Clears the current user's FYI request on the document and sends FYIs to the
 * given ad hoc recipients. The document is not persisted here.
 *
 * @param document the document whose FYI is cleared; must be non-null and have a document number
 * @param adHocRecipients ad hoc persons/groups to send FYI requests to
 * @return the document
 * @see org.kuali.rice.krad.service.DocumentService#clearDocumentFyi(org.kuali.rice.krad.document.Document,
 *      java.util.List)
 */
@Override
public Document clearDocumentFyi(Document document,
        List<AdHocRouteRecipient> adHocRecipients) throws WorkflowException {
    checkForNulls(document);
    // populate document content so searchable attributes will be indexed properly
    document.populateDocumentForRouting();
    getWorkflowDocumentService().clearFyi(document.getDocumentHeader().getWorkflowDocument(), adHocRecipients);
    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            document.getDocumentHeader().getWorkflowDocument());
    removeAdHocPersonsAndWorkgroups(document);
    return document;
}
/**
 * Validates and persists the document, then performs the workflow complete action
 * with the given annotation and ad hoc recipients.
 *
 * @param document the document to complete; must be non-null and have a document number
 * @param annotation the annotation recorded with the workflow action (may be null)
 * @param adHocRecipients ad hoc recipients (raw List for interface compatibility;
 *        elements are presumably AdHocRouteRecipient instances — TODO confirm against the interface)
 * @return the persisted, completed document
 * @see org.kuali.rice.krad.service.DocumentService#completeDocument(org.kuali.rice.krad.document.Document,
 *      java.lang.String, java.util.List)
 */
@Override
public Document completeDocument(Document document, String annotation,
        List adHocRecipients) throws WorkflowException {
    checkForNulls(document);

    document.prepareForSave();
    // run the complete rule event and persist before handing off to workflow
    Document savedDocument = validateAndPersistDocument(document, new CompleteDocumentEvent(document));

    prepareWorkflowDocument(savedDocument);
    getWorkflowDocumentService().complete(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation,
            adHocRecipients);

    // cache the workflow document on the current user session
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
            savedDocument.getDocumentHeader().getWorkflowDocument());

    removeAdHocPersonsAndWorkgroups(savedDocument);

    return savedDocument;
}
/**
 * Verifies the document and its document number are non-null.
 *
 * @param document the document to check
 * @throws IllegalArgumentException if the document is null
 * @throws IllegalStateException if the document has no document number
 */
protected void checkForNulls(Document document) {
    if (document == null) {
        throw new IllegalArgumentException("invalid (null) document");
    }
    if (document.getDocumentNumber() == null) {
        throw new IllegalStateException("invalid (null) documentHeaderId");
    }
}
/**
 * Persists the document's current ad hoc routing recipients (persons and workgroups)
 * and then validates and persists the document itself.
 * <p>
 * Wrapping validateAndPersistDocument this way keeps everything in one transaction
 * and avoids modifying the signature of validateAndPersistDocument.
 *
 * @param document the document whose ad hoc recipients and state are saved
 * @param event the rule event used for validation
 * @return the persisted document
 */
private Document validateAndPersistDocumentAndSaveAdHocRoutingRecipients(Document document,
        KualiDocumentEvent event) {
    List<AdHocRouteRecipient> adHocRoutingRecipients = new ArrayList<AdHocRouteRecipient>();
    adHocRoutingRecipients.addAll(document.getAdHocRoutePersons());
    adHocRoutingRecipients.addAll(document.getAdHocRouteWorkgroups());

    documentAdHocService.replaceAdHocsForDocument( document.getDocumentNumber(), adHocRoutingRecipients );
    return validateAndPersistDocument(document, event);
}
/**
 * Checks whether both a workflow document and a document header exist for the given
 * id. If no user session is in scope (e.g. when called from a workflow background
 * process), a temporary system-user session is established for the duration of the
 * call (KFSMI-2543).
 *
 * @param documentHeaderId the id to check; must not be blank
 * @return true if both the workflow document and document header exist
 * @throws IllegalArgumentException if documentHeaderId is blank
 * @see org.kuali.rice.krad.service.DocumentService#documentExists(java.lang.String)
 */
@Override
public boolean documentExists(String documentHeaderId) {
    // validate parameters
    if (StringUtils.isBlank(documentHeaderId)) {
        throw new IllegalArgumentException("invalid (blank) documentHeaderId");
    }

    boolean internalUserSession = false;
    try {
        // KFSMI-2543 - allowed method to run without a user session so it can be used
        // by workflow processes
        if (GlobalVariables.getUserSession() == null) {
            internalUserSession = true;
            GlobalVariables.setUserSession(new UserSession(KRADConstants.SYSTEM_USER));
            GlobalVariables.clear();
        }

        // look for workflowDocumentHeader, since that supposedly won't break the transaction
        if (getWorkflowDocumentService().workflowDocumentExists(documentHeaderId)) {
            // look for docHeaderId, since that fails without breaking the transaction
            return documentHeaderService.getDocumentHeaderById(documentHeaderId) != null;
        }

        return false;
    } finally {
        // if a user session was established for this call, clear it out
        if (internalUserSession) {
            GlobalVariables.clear();
            GlobalVariables.setUserSession(null);
        }
    }
}
/**
 * Creates a new document by class: resolves the document type name registered for
 * the class in the data dictionary and delegates to {@link #getNewDocument(String)}.
 *
 * @param documentClass the Document implementation class; must be non-null and a Document subtype
 * @return the new document
 * @throws UnknownDocumentTypeException if no document type name is mapped to the class
 * @see org.kuali.rice.krad.service.DocumentService#getNewDocument(java.lang.Class)
 */
@Override
public Document getNewDocument(Class<? extends Document> documentClass) throws WorkflowException {
    if (documentClass == null) {
        throw new IllegalArgumentException("invalid (null) documentClass");
    }
    if (!Document.class.isAssignableFrom(documentClass)) {
        throw new IllegalArgumentException("invalid (non-Document) documentClass");
    }

    String documentTypeName = getDataDictionaryService().getDocumentTypeNameByClass(documentClass);
    if (StringUtils.isBlank(documentTypeName)) {
        throw new UnknownDocumentTypeException(
                "unable to get documentTypeName for unknown documentClass '" + documentClass.getName() + "'");
    }
    return getNewDocument(documentTypeName);
}
/**
 * Creates a new document by document type name. The principal name passed in will
 * be used as the document initiator; if the initiatorPrincipalNm is null, blank or
 * cannot be resolved, the current session user is used instead. Checks initiate
 * authorization, creates the workflow document, and instantiates the Document class
 * (maintenance documents via their (String documentTypeName) constructor, all
 * others via the no-arg constructor).
 *
 * @param documentTypeName the workflow document type name; must not be blank
 * @param initiatorPrincipalNm principal name of the initiator, or null/blank for the session user
 * @return the new document with its document header and number populated
 * @throws IllegalStateException if no user session is in scope
 * @throws DocumentAuthorizationException if the initiator may not initiate this document type
 * @see org.kuali.rice.krad.service.DocumentService#getNewDocument(String, String)
 */
@Override
public Document getNewDocument(String documentTypeName, String initiatorPrincipalNm) throws WorkflowException {

    // argument validation
    String watchName = "DocumentServiceImpl.getNewDocument";
    StopWatch watch = new StopWatch();
    watch.start();
    if (LOG.isDebugEnabled()) {
        LOG.debug(watchName + ": started");
    }
    if (StringUtils.isBlank(documentTypeName)) {
        throw new IllegalArgumentException("invalid (blank) documentTypeName");
    }
    if (GlobalVariables.getUserSession() == null) {
        throw new IllegalStateException(
                "GlobalVariables must be populated with a valid UserSession before a new document can be created");
    }

    // get the class for this docTypeName
    Class<? extends Document> documentClass = getDocumentClassByTypeName(documentTypeName);

    // get the initiator
    Person initiator = null;
    if (StringUtils.isBlank(initiatorPrincipalNm)) {
        initiator = GlobalVariables.getUserSession().getPerson();
    } else {
        initiator = KimApiServiceLocator.getPersonService().getPersonByPrincipalName(initiatorPrincipalNm);
        if (initiator == null) {
            // fall back to the session user when the requested principal cannot be resolved
            initiator = GlobalVariables.getUserSession().getPerson();
        }
    }

    // get the authorization
    DocumentAuthorizer documentAuthorizer = getDocumentDictionaryService().getDocumentAuthorizer(documentTypeName);
    DocumentPresentationController documentPresentationController =
            getDocumentDictionaryService().getDocumentPresentationController(documentTypeName);
    // make sure this person is authorized to initiate
    if ( LOG.isDebugEnabled() ) {
        LOG.debug("calling canInitiate from getNewDocument(" + documentTypeName + "," + initiatorPrincipalNm + ")");
    }
    if (!documentPresentationController.canInitiate(documentTypeName) ||
            !documentAuthorizer.canInitiate(documentTypeName, initiator)) {
        throw new DocumentAuthorizationException(initiator.getPrincipalName(), "initiate", documentTypeName);
    }

    // initiate new workflow entry, get the workflow doc
    WorkflowDocument workflowDocument = getWorkflowDocumentService().createWorkflowDocument(documentTypeName, initiator);
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(), workflowDocument);

    // create a new document header object
    DocumentHeader documentHeader = new DocumentHeader();
    documentHeader.setWorkflowDocument(workflowDocument);
    documentHeader.setDocumentNumber(workflowDocument.getDocumentId());

    // build Document of specified type
    Document document = null;
    try {
        // all maintenance documents have same class
        if (MaintenanceDocumentBase.class.isAssignableFrom(documentClass)) {
            Class<?>[] defaultConstructor = new Class[]{String.class};
            Constructor<? extends Document> cons = documentClass.getConstructor(defaultConstructor);
            if (cons == null) {
                throw new ConfigurationException(
                        "Could not find constructor with document type name parameter needed for Maintenance Document Base class");
            }
            document = cons.newInstance(documentTypeName);
        } else {
            // non-maintenance document
            document = documentClass.newInstance();
        }
    } catch (IllegalAccessException e) {
        throw new RuntimeException("Error instantiating Document", e);
    } catch (InstantiationException e) {
        throw new RuntimeException("Error instantiating Document", e);
    } catch (SecurityException e) {
        throw new RuntimeException("Error instantiating Maintenance Document", e);
    } catch (NoSuchMethodException e) {
        throw new RuntimeException(
                "Error instantiating Maintenance Document: No constructor with String parameter found", e);
    } catch (IllegalArgumentException e) {
        throw new RuntimeException("Error instantiating Maintenance Document", e);
    } catch (InvocationTargetException e) {
        throw new RuntimeException("Error instantiating Maintenance Document", e);
    }

    document.setDocumentHeader(documentHeader);
    document.setDocumentNumber(documentHeader.getDocumentNumber());
    watch.stop();
    if (LOG.isDebugEnabled()) {
        LOG.debug(watchName + ": " + watch.toString());
    }
    return document;
}
/**
 * Creates a new document by document type name with the current session user as initiator.
 *
 * @param documentTypeName the workflow document type name
 * @return the new document
 * @see org.kuali.rice.krad.service.DocumentService#getNewDocument(java.lang.String)
 */
@Override
public Document getNewDocument(String documentTypeName) throws WorkflowException {
    return getNewDocument(documentTypeName, null);
}
/**
 * Loads the document (and its workflow document) for the given document header id.
 * If no user session is in scope (e.g. when invoked from a workflow background
 * process), a temporary system-user session is established for the duration of the
 * call (KFSMI-2543).
 *
 * @param documentHeaderId the id of the document to load; must be non-null
 * @return the loaded Document with its workflow document attached, or null if no
 *         document exists for the id
 * @throws WorkflowException if the workflow document cannot be loaded
 * @throws IllegalArgumentException if documentHeaderId is null
 */
@Override
public Document getByDocumentHeaderId(String documentHeaderId) throws WorkflowException {
    if (documentHeaderId == null) {
        throw new IllegalArgumentException("invalid (null) documentHeaderId");
    }
    boolean internalUserSession = false;
    try {
        // KFSMI-2543 - allowed method to run without a user session so it can be used
        // by workflow processes
        if (GlobalVariables.getUserSession() == null) {
            internalUserSession = true;
            GlobalVariables.setUserSession(new UserSession(KRADConstants.SYSTEM_USER));
            GlobalVariables.clear();
        }

        WorkflowDocument workflowDocument = null;

        if (LOG.isDebugEnabled()) {
            LOG.debug("Retrieving doc id: " + documentHeaderId + " from workflow service.");
        }
        workflowDocument = getWorkflowDocumentService()
                .loadWorkflowDocument(documentHeaderId, GlobalVariables.getUserSession().getPerson());
        // cache the workflow document on the (possibly temporary) user session
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(), workflowDocument);

        Class<? extends Document> documentClass = getDocumentClassByTypeName(workflowDocument.getDocumentTypeName());

        // retrieve the Document
        Document document = getLegacyDataAdapter().findByDocumentHeaderId(documentClass, documentHeaderId);
        return postProcessDocument(documentHeaderId, workflowDocument, document);
    } finally {
        // if a user session was established for this call, clear it out
        if (internalUserSession) {
            GlobalVariables.clear();
            GlobalVariables.setUserSession(null);
        }
    }
}
/**
 * Loads the document for the given id without requiring or touching a user session;
 * the workflow document is loaded as the system user instead.
 *
 * @param documentHeaderId the id of the document to load; must be non-null
 * @return the loaded Document with its workflow document attached, or null if none exists
 * @throws IllegalArgumentException if documentHeaderId is null
 * @see org.kuali.rice.krad.service.DocumentService#getByDocumentHeaderIdSessionless(java.lang.String)
 */
@Override
public Document getByDocumentHeaderIdSessionless(String documentHeaderId) throws WorkflowException {
    if (documentHeaderId == null) {
        throw new IllegalArgumentException("invalid (null) documentHeaderId");
    }

    WorkflowDocument workflowDocument = null;

    if (LOG.isDebugEnabled()) {
        LOG.debug("Retrieving doc id: " + documentHeaderId + " from workflow service.");
    }

    // load the workflow document as the system user rather than the session user
    Person person = getPersonService().getPersonByPrincipalName(KRADConstants.SYSTEM_USER);
    workflowDocument = workflowDocumentService.loadWorkflowDocument(documentHeaderId, person);

    Class<? extends Document> documentClass = getDocumentClassByTypeName(workflowDocument.getDocumentTypeName());

    // retrieve the Document
    Document document = getLegacyDataAdapter().findByDocumentHeaderId(documentClass, documentHeaderId);

    return postProcessDocument(documentHeaderId, workflowDocument, document);
}
/**
 * Resolves the Document implementation class registered in the data dictionary for
 * the given document type name.
 *
 * @param documentTypeName the workflow document type name; must not be blank
 * @return the Document class mapped to that type name
 * @throws IllegalArgumentException if the name is blank
 * @throws UnknownDocumentTypeException if no class is mapped to the name
 */
private Class<? extends Document> getDocumentClassByTypeName(String documentTypeName) {
    if (StringUtils.isBlank(documentTypeName)) {
        throw new IllegalArgumentException("invalid (blank) documentTypeName");
    }

    Class<? extends Document> documentClass =
            getDataDictionaryService().getDocumentClassByTypeName(documentTypeName);
    if (documentClass == null) {
        throw new UnknownDocumentTypeException(
                "unable to get class for unknown documentTypeName '" + documentTypeName + "'");
    }
    return documentClass;
}
/**
 * Loads the Notes for the note target on this Document, but only once the note
 * target is ready ({@code isNoteTargetReady}). Notes attached to the note target's
 * object id are loaded; additionally, for business-object-noted documents that were
 * disapproved, notes linked to the document header at disapproval time are loaded
 * as well (see disapproveDocument, which re-links such notes).
 *
 * @param document the document for which to load the notes
 */
protected void loadNotes(final Document document) {
    if (isNoteTargetReady(document)) {
        // Note: the previous implementation computed a "legacyObjectClass" here
        // (the maintainable's data object class or the document class) but never
        // used it; that dead code has been removed.
        List<Note> notes = new ArrayList<Note>();
        if (StringUtils.isNotBlank(document.getNoteTarget().getObjectId())) {
            notes.addAll(getNoteService().getByRemoteObjectId(document.getNoteTarget().getObjectId()));
        }
        //notes created on 'disapprove' are linked to Doc Header, so this checks that even if notetype = BO
        if (document.getNoteType().equals(NoteType.BUSINESS_OBJECT) && document.getDocumentHeader()
                .getWorkflowDocument().isDisapproved()) {
            notes.addAll(getNoteService().getByRemoteObjectId(document.getDocumentHeader().getObjectId()));
        }
        document.setNotes(notes);
    }
}
    /**
     * Performs required post-processing for every document from the documentDao:
     * attaches the workflow document to the header, runs the document's
     * after-retrieve hook, and loads its notes.
     *
     * @param documentHeaderId the id the document was loaded by (unused here; kept for context)
     * @param workflowDocument the workflow document to attach to the document header
     * @param document the document to post-process; may be null
     * @return the post-processed document, or null if the input document was null
     */
    private Document postProcessDocument(String documentHeaderId, WorkflowDocument workflowDocument, Document document) {
        if (document != null) {
            document.getDocumentHeader().setWorkflowDocument(workflowDocument);
            document.processAfterRetrieve();
            loadNotes(document);
        }
        return document;
    }
    /**
     * The default implementation - this retrieves all documents by a list of documentHeader ids
     * for a given class, and runs the standard post-processing (workflow document attach,
     * after-retrieve hook, note loading) on each.
     *
     * <p>May be called without an established user session; in that case a temporary session
     * for the system user is created for the duration of the call and cleared afterwards.</p>
     *
     * @see org.kuali.rice.krad.service.DocumentService#getDocumentsByListOfDocumentHeaderIds(java.lang.Class,
     *      java.util.List)
     */
    @Override
    public List<Document> getDocumentsByListOfDocumentHeaderIds(Class<? extends Document> documentClass,
            List<String> documentHeaderIds) throws WorkflowException {
        // validate documentHeaderIdList and contents
        if (documentHeaderIds == null) {
            throw new IllegalArgumentException("invalid (null) documentHeaderId list");
        }
        int index = 0;
        for (String documentHeaderId : documentHeaderIds) {
            if (StringUtils.isBlank(documentHeaderId)) {
                throw new IllegalArgumentException("invalid (blank) documentHeaderId at list index " + index);
            }
            index++;
        }
        boolean internalUserSession = false;
        try {
            // KFSMI-2543 - allowed method to run without a user session so it can be used
            // by workflow processes
            if (GlobalVariables.getUserSession() == null) {
                internalUserSession = true;
                GlobalVariables.setUserSession(new UserSession(KRADConstants.SYSTEM_USER));
                GlobalVariables.clear();
            }
            // retrieve all documents that match the document header ids
            List<? extends Document> rawDocuments = getLegacyDataAdapter().findByDocumentHeaderIds(documentClass,
                    documentHeaderIds);
            // post-process them
            List<Document> documents = new ArrayList<Document>();
            for (Document document : rawDocuments) {
                WorkflowDocument workflowDocument = getWorkflowDocumentService().loadWorkflowDocument(document.getDocumentNumber(), GlobalVariables.getUserSession().getPerson());
                document = postProcessDocument(document.getDocumentNumber(), workflowDocument, document);
                documents.add(document);
            }
            return documents;
        } finally {
            // if a user session was established for this call, clear it out
            if (internalUserSession) {
                GlobalVariables.clear();
                GlobalVariables.setUserSession(null);
            }
        }
    }
/* Helper Methods */
    /**
     * Validates and persists a document.
     *
     * <p>Runs the document's business rules for the given event, prepares it for save,
     * stores it via the legacy data adapter (re-attaching the workflow document the
     * persistence layer drops), attempts to save its notes, and finally runs the
     * document's post-save hook.</p>
     *
     * @param document the document to validate and persist; must not be null
     * @param event the rule event driving validation and save preparation
     * @return the persisted document
     * @throws ValidationException if business-rule validation fails
     * @throws IllegalArgumentException if document is null
     */
    @Override
    public Document validateAndPersistDocument(Document document, KualiDocumentEvent event) throws ValidationException {
        if (document == null) {
            LOG.error("document passed to validateAndPersist was null");
            throw new IllegalArgumentException("invalid (null) document");
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("validating and preparing to persist document " + document.getDocumentNumber());
        }
        document.validateBusinessRules(event);
        document.prepareForSave(event);
        // save the document
        Document savedDocument = null;
        try {
            if (LOG.isInfoEnabled()) {
                LOG.info("storing document " + document.getDocumentNumber());
            }
            savedDocument = getLegacyDataAdapter().saveDocument(document);
            // Need to preserve the workflow document header, which just got left behind
            savedDocument.getDocumentHeader().setWorkflowDocument(document.getDocumentHeader().getWorkflowDocument());
        } catch (OptimisticLockingFailureException e) {
            LOG.error("exception encountered on store of document " + e.getMessage());
            throw e;
        }
        // note save may legitimately be deferred when the note target is not yet ready
        boolean notesSaved = saveDocumentNotes(savedDocument);
        if (!notesSaved) {
            if (LOG.isInfoEnabled()) {
                LOG.info(
                        "Notes not saved during validateAndPersistDocument, likely means that note save needs to be deferred because note target is not ready.");
            }
        }
        savedDocument.postProcessSave(event);
        return savedDocument;
    }
/**
* Sets the title and app document id in the flex document
*
* @param document
* @throws org.kuali.rice.kew.api.exception.WorkflowException
*/
@Override
public void prepareWorkflowDocument(Document document) throws WorkflowException {
// populate document content so searchable attributes will be indexed properly
document.populateDocumentForRouting();
// make sure we push the document title into the workflowDocument
populateDocumentTitle(document);
// make sure we push the application document id into the workflowDocument
populateApplicationDocumentId(document);
}
/**
* This method will grab the generated document title from the document and add it to the workflowDocument so that
* it gets pushed into
* workflow when routed.
*
* @param document
* @throws org.kuali.rice.kew.api.exception.WorkflowException
*/
private void populateDocumentTitle(Document document) throws WorkflowException {
String documentTitle = document.getDocumentTitle();
if (StringUtils.isNotBlank(documentTitle)) {
document.getDocumentHeader().getWorkflowDocument().setTitle(documentTitle);
}
}
/**
* This method will grab the organization document number from the document and add it to the workflowDocument so
* that it gets pushed
* into workflow when routed.
*
* @param document
*/
private void populateApplicationDocumentId(Document document) {
String organizationDocumentNumber = document.getDocumentHeader().getOrganizationDocumentNumber();
if (StringUtils.isNotBlank(organizationDocumentNumber)) {
document.getDocumentHeader().getWorkflowDocument().setApplicationDocumentId(organizationDocumentNumber);
}
}
/**
* This is to allow for updates of document statuses and other related requirements for updates outside of the
* initial save and
* route
*/
@Override
public Document updateDocument(Document document) {
checkForNulls(document);
Document savedDocument = getLegacyDataAdapter().saveDocument(document);
// Need to preserve the workflow document header, which just got left behind
savedDocument.getDocumentHeader().setWorkflowDocument(document.getDocumentHeader().getWorkflowDocument());
return savedDocument;
}
/**
* @see org.kuali.rice.krad.service.DocumentService#createNoteFromDocument(org.kuali.rice.krad.document.Document,
* java.lang.String)
*/
@Override
public Note createNoteFromDocument(Document document, String text) {
Note note = new Note();
note.setNotePostedTimestamp(getDateTimeService().getCurrentTimestamp());
note.setVersionNumber(Long.valueOf(1));
note.setNoteText(text);
note.setNoteTypeCode(document.getNoteType().getCode());
PersistableBusinessObject bo = document.getNoteTarget();
// TODO gah! this is awful
Person kualiUser = GlobalVariables.getUserSession().getPerson();
if (kualiUser == null) {
throw new IllegalStateException("Current UserSession has a null Person.");
}
return bo == null ? null : getNoteService().createNote(note, bo, kualiUser.getPrincipalId());
}
/**
* @see org.kuali.rice.krad.service.DocumentService#saveDocumentNotes(org.kuali.rice.krad.document.Document)
*/
@Override
public boolean saveDocumentNotes(Document document) {
if (isNoteTargetReady(document)) {
List<Note> notes = document.getNotes();
for (Note note : document.getNotes()) {
linkNoteRemoteObjectId(note, document.getNoteTarget());
}
getNoteService().saveNoteList(notes);
return true;
}
return false;
}
    /**
     * Sends a workflow FYI-style notification for a note to the note's ad hoc route
     * recipient, formatting the configured notification annotation with the sender,
     * recipient, and note text. Afterwards the note's recipient is reset so a
     * notification can be sent to another person.
     *
     * @param document the document whose workflow document carries the notification
     * @param note the note being routed; its ad hoc recipient identifies the target user
     * @param sender the person sending the notification
     * @throws WorkflowException if the workflow notification fails
     * @throws RuntimeException if no notification annotation message is configured
     * @see org.kuali.rice.krad.service.DocumentService
     */
    @Override
    public void sendNoteRouteNotification(Document document, Note note, Person sender) throws WorkflowException {
        AdHocRouteRecipient routeRecipient = note.getAdHocRouteRecipient();
        // build notification request
        Person requestedUser = this.getPersonService().getPersonByPrincipalName(routeRecipient.getId());
        String senderName = sender.getFirstName() + " " + sender.getLastName();
        String requestedName = requestedUser.getFirstName() + " " + requestedUser.getLastName();
        String notificationText =
                kualiConfigurationService.getPropertyValueAsString(
                        RiceKeyConstants.MESSAGE_NOTE_NOTIFICATION_ANNOTATION);
        if (StringUtils.isBlank(notificationText)) {
            throw new RuntimeException(
                    "No annotation message found for note notification. Message needs added to application resources with key:" +
                            RiceKeyConstants.MESSAGE_NOTE_NOTIFICATION_ANNOTATION);
        }
        notificationText =
                MessageFormat.format(notificationText, new Object[]{senderName, requestedName, note.getNoteText()});
        List<AdHocRouteRecipient> routeRecipients = new ArrayList<AdHocRouteRecipient>();
        routeRecipients.add(routeRecipient);
        workflowDocumentService
                .sendWorkflowNotification(document.getDocumentHeader().getWorkflowDocument(), notificationText,
                        routeRecipients, KRADConstants.NOTE_WORKFLOW_NOTIFICATION_REQUEST_LABEL);
        // clear recipient, allowing a notification to be sent to another person
        note.setAdHocRouteRecipient(new AdHocRoutePerson());
    }
/**
* Determines if the given document's note target is ready for notes to be
* attached and persisted against it. This method verifies that the document's
* note target is non-null as well as checking that it has a non-empty object id.
*
* @param document the document on which to check for note target readiness
* @return true if the note target is ready, false otherwise
*/
protected boolean isNoteTargetReady(Document document) {
//special case for disappoved documents
if (document.getDocumentHeader().getWorkflowDocument().isDisapproved()) {
return true;
}
PersistableBusinessObject noteTarget = document.getNoteTarget();
if (noteTarget == null || StringUtils.isBlank(noteTarget.getObjectId())) {
return false;
}
return true;
}
private void linkNoteRemoteObjectId(Note note, PersistableBusinessObject noteTarget) {
String objectId = noteTarget.getObjectId();
if (StringUtils.isBlank(objectId)) {
throw new IllegalStateException(
"Attempted to link a Note with a PersistableBusinessObject with no object id");
}
note.setRemoteObjectIdentifier(noteTarget.getObjectId());
}
    /**
     * Sends ad hoc workflow requests for the document to the given recipients without
     * routing it, then registers the workflow document with the user session and clears
     * the document's ad hoc recipients.
     *
     * @param document the document to send requests for
     * @param annotation the annotation attached to the workflow notification
     * @param adHocRecipients the recipients of the ad hoc requests
     * @throws WorkflowException if preparing or notifying the workflow document fails
     * @see org.kuali.rice.krad.service.DocumentService#sendAdHocRequests(org.kuali.rice.krad.document.Document, String, java.util.List)
     */
    @Override
    public void sendAdHocRequests(Document document, String annotation,
            List<AdHocRouteRecipient> adHocRecipients) throws WorkflowException {
        prepareWorkflowDocument(document);
        getWorkflowDocumentService()
                .sendWorkflowNotification(document.getDocumentHeader().getWorkflowDocument(), annotation,
                        adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
    }
private void removeAdHocPersonsAndWorkgroups(Document document) {
documentAdHocService.replaceAdHocsForDocument(document.getDocumentNumber(), null);
document.setAdHocRoutePersons(new ArrayList<AdHocRoutePerson>());
document.setAdHocRouteWorkgroups(new ArrayList<AdHocRouteWorkgroup>());
}
    /**
     * Sets the date/time service (Spring-injected).
     *
     * @param dateTimeService the service to set
     */
    @Required
    public void setDateTimeService(DateTimeService dateTimeService) {
        this.dateTimeService = dateTimeService;
    }
protected DateTimeService getDateTimeService() {
if (this.dateTimeService == null) {
this.dateTimeService = CoreApiServiceLocator.getDateTimeService();
}
return this.dateTimeService;
}
    /**
     * Sets the note service (Spring-injected).
     *
     * @param noteService the service to set
     */
    @Required
    public void setNoteService(NoteService noteService) {
        this.noteService = noteService;
    }
protected NoteService getNoteService() {
if (this.noteService == null) {
this.noteService = KRADServiceLocator.getNoteService();
}
return this.noteService;
}
    /**
     * Sets the legacy data adapter used for document persistence.
     *
     * @param legacyDataAdapter the adapter to set
     */
    public void setLegacyDataAdapter(LegacyDataAdapter legacyDataAdapter) {
        this.legacyDataAdapter = legacyDataAdapter;
    }
    /**
     * Returns the legacy data adapter; no lazy fallback — must be injected.
     *
     * @return the legacy data adapter
     */
    protected LegacyDataAdapter getLegacyDataAdapter() {
        return this.legacyDataAdapter;
    }
    /**
     * Sets the workflow document service.
     *
     * @param workflowDocumentService the service to set
     */
    public void setWorkflowDocumentService(WorkflowDocumentService workflowDocumentService) {
        this.workflowDocumentService = workflowDocumentService;
    }
protected WorkflowDocumentService getWorkflowDocumentService() {
if (this.workflowDocumentService == null) {
this.workflowDocumentService = KRADServiceLocatorWeb.getWorkflowDocumentService();
}
return this.workflowDocumentService;
}
    /**
     * Sets the data dictionary service (Spring-injected).
     *
     * @param dataDictionaryService the service to set
     */
    @Required
    public void setDataDictionaryService(DataDictionaryService dataDictionaryService) {
        this.dataDictionaryService = dataDictionaryService;
    }
protected DataDictionaryService getDataDictionaryService() {
if (this.dataDictionaryService == null) {
this.dataDictionaryService = KRADServiceLocatorWeb.getDataDictionaryService();
}
return this.dataDictionaryService;
}
protected DocumentDictionaryService getDocumentDictionaryService() {
if (documentDictionaryService == null) {
documentDictionaryService = KRADServiceLocatorWeb.getDocumentDictionaryService();
}
return documentDictionaryService;
}
    /**
     * Sets the document dictionary service (Spring-injected).
     *
     * @param documentDictionaryService the service to set
     */
    @Required
    public void setDocumentDictionaryService(DocumentDictionaryService documentDictionaryService) {
        this.documentDictionaryService = documentDictionaryService;
    }
public PersonService getPersonService() {
if (personService == null) {
personService = KimApiServiceLocator.getPersonService();
}
return personService;
}
    /**
     * Sets the configuration service used to look up notification messages (Spring-injected).
     *
     * @param kualiConfigurationService the service to set
     */
    @Required
    public void setKualiConfigurationService(ConfigurationService kualiConfigurationService) {
        this.kualiConfigurationService = kualiConfigurationService;
    }
    /**
     * Returns the document header service; no lazy fallback — must be injected.
     *
     * @return the document header service
     */
    public DocumentHeaderService getDocumentHeaderService() {
        return documentHeaderService;
    }
    /**
     * Sets the document header service (Spring-injected).
     *
     * @param documentHeaderService the service to set
     */
    @Required
    public void setDocumentHeaderService(DocumentHeaderService documentHeaderService) {
        this.documentHeaderService = documentHeaderService;
    }
    /**
     * Sets the document ad hoc service (Spring-injected).
     *
     * @param documentAdHocService the service to set
     */
    @Required
    public void setDocumentAdHocService(DocumentAdHocService documentAdHocService) {
        this.documentAdHocService = documentAdHocService;
    }
}
/**
* Copyright 2005-2013 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.service.impl;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.StopWatch;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.config.ConfigurationException;
import org.kuali.rice.core.api.config.property.ConfigurationService;
import org.kuali.rice.core.api.datetime.DateTimeService;
import org.kuali.rice.core.api.util.RiceKeyConstants;
import org.kuali.rice.core.framework.persistence.jta.TransactionalNoValidationExceptionRollback;
import org.kuali.rice.kew.api.WorkflowDocument;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.kim.api.identity.Person;
import org.kuali.rice.kim.api.identity.PersonService;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import org.kuali.rice.krad.UserSession;
import org.kuali.rice.krad.UserSessionUtils;
import org.kuali.rice.krad.bo.AdHocRoutePerson;
import org.kuali.rice.krad.bo.AdHocRouteRecipient;
import org.kuali.rice.krad.bo.AdHocRouteWorkgroup;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.bo.DocumentHeader;
import org.kuali.rice.krad.bo.Note;
import org.kuali.rice.krad.bo.PersistableBusinessObject;
import org.kuali.rice.krad.datadictionary.exception.UnknownDocumentTypeException;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.document.DocumentAuthorizer;
import org.kuali.rice.krad.document.DocumentPresentationController;
import org.kuali.rice.krad.exception.DocumentAuthorizationException;
import org.kuali.rice.krad.exception.ValidationException;
import org.kuali.rice.krad.maintenance.Maintainable;
import org.kuali.rice.krad.maintenance.MaintenanceDocument;
import org.kuali.rice.krad.maintenance.MaintenanceDocumentBase;
import org.kuali.rice.krad.rules.rule.event.ApproveDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.BlanketApproveDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.CompleteDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.KualiDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.RouteDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.SaveDocumentEvent;
import org.kuali.rice.krad.rules.rule.event.SaveEvent;
import org.kuali.rice.krad.service.DataDictionaryService;
import org.kuali.rice.krad.service.DocumentAdHocService;
import org.kuali.rice.krad.service.DocumentDictionaryService;
import org.kuali.rice.krad.service.DocumentHeaderService;
import org.kuali.rice.krad.service.DocumentService;
import org.kuali.rice.krad.service.KRADServiceLocator;
import org.kuali.rice.krad.service.KRADServiceLocatorWeb;
import org.kuali.rice.krad.service.LegacyDataAdapter;
import org.kuali.rice.krad.service.NoteService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.KRADUtils;
import org.kuali.rice.krad.util.NoteType;
import org.kuali.rice.krad.workflow.service.WorkflowDocumentService;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.dao.OptimisticLockingFailureException;
/**
* Service implementation for the Document structure. It contains all of the document level type of
* processing and calling back into documents for various centralization of functionality. This is the default,
* Kuali delivered implementation which utilizes Workflow.
*
* @author Kuali Rice Team ([email protected])
*/
@TransactionalNoValidationExceptionRollback
public class DocumentServiceImpl implements DocumentService {
    private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(DocumentServiceImpl.class);
    // Collaborating services. Injected via the setters below; several of the getters
    // fall back to service locators when a dependency was not injected.
    protected DateTimeService dateTimeService;
    protected NoteService noteService;
    protected WorkflowDocumentService workflowDocumentService;
    protected LegacyDataAdapter legacyDataAdapter;
    protected DataDictionaryService dataDictionaryService;
    protected DocumentDictionaryService documentDictionaryService;
    protected PersonService personService;
    protected ConfigurationService kualiConfigurationService;
    protected DocumentHeaderService documentHeaderService;
    protected DocumentAdHocService documentAdHocService;
/**
* @see org.kuali.rice.krad.service.DocumentService#saveDocument(org.kuali.rice.krad.document.Document)
*/
@Override
public Document saveDocument(Document document) throws WorkflowException, ValidationException {
return saveDocument(document, SaveDocumentEvent.class);
}
    /**
     * Saves the document using the given save-style event class: validates and persists
     * the document (together with its ad hoc routing recipients), prepares it for
     * workflow, saves the workflow document, and registers it with the user session.
     *
     * @param document the document to save
     * @param kualiDocumentEventClass the event class driving validation; must implement {@link SaveEvent}
     * @return the saved document
     * @throws ConfigurationException if the event class is not a save-style event
     * @throws IllegalArgumentException if kualiDocumentEventClass is null
     */
    @Override
    public Document saveDocument(Document document,
            Class<? extends KualiDocumentEvent> kualiDocumentEventClass) throws WorkflowException, ValidationException {
        checkForNulls(document);
        if (kualiDocumentEventClass == null) {
            throw new IllegalArgumentException("invalid (null) kualiDocumentEventClass");
        }
        // if event is not an instance of a SaveDocumentEvent or a SaveOnlyDocumentEvent
        if (!SaveEvent.class.isAssignableFrom(kualiDocumentEventClass)) {
            throw new ConfigurationException("The KualiDocumentEvent class '" + kualiDocumentEventClass.getName() +
                    "' does not implement the class '" + SaveEvent.class.getName() + "'");
        }
        // if (!getDocumentActionFlags(document).getCanSave()) {
        //     throw buildAuthorizationException("save", document);
        // }
        document.prepareForSave();
        Document savedDocument = validateAndPersistDocumentAndSaveAdHocRoutingRecipients(document,
                generateKualiDocumentEvent(document, kualiDocumentEventClass));
        prepareWorkflowDocument(savedDocument);
        getWorkflowDocumentService().save(savedDocument.getDocumentHeader().getWorkflowDocument(), null);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                savedDocument.getDocumentHeader().getWorkflowDocument());
        return savedDocument;
    }
private KualiDocumentEvent generateKualiDocumentEvent(Document document,
Class<? extends KualiDocumentEvent> eventClass) throws ConfigurationException {
String potentialErrorMessage =
"Found error trying to generate Kuali Document Event using event class '" + eventClass.getName() +
"' for document " + document.getDocumentNumber();
try {
Constructor<?> usableConstructor = null;
List<Object> paramList = new ArrayList<Object>();
for (Constructor<?> currentConstructor : eventClass.getConstructors()) {
for (Class<?> parameterClass : currentConstructor.getParameterTypes()) {
if (Document.class.isAssignableFrom(parameterClass)) {
usableConstructor = currentConstructor;
paramList.add(document);
} else {
paramList.add(null);
}
}
if (KRADUtils.isNotNull(usableConstructor)) {
break;
}
}
if (usableConstructor == null) {
throw new RuntimeException("Cannot find a constructor for class '" + eventClass.getName() +
"' that takes in a document parameter");
}
return (KualiDocumentEvent) usableConstructor.newInstance(paramList.toArray());
} catch (SecurityException e) {
throw new ConfigurationException(potentialErrorMessage, e);
} catch (IllegalArgumentException e) {
throw new ConfigurationException(potentialErrorMessage, e);
} catch (InstantiationException e) {
throw new ConfigurationException(potentialErrorMessage, e);
} catch (IllegalAccessException e) {
throw new ConfigurationException(potentialErrorMessage, e);
} catch (InvocationTargetException e) {
throw new ConfigurationException(potentialErrorMessage, e);
}
}
    /**
     * Validates, persists, and routes the document: the document is saved with a
     * {@link RouteDocumentEvent}, routed through workflow with the given annotation and
     * ad hoc recipients, registered with the user session, and its ad hoc recipients
     * are then cleared.
     *
     * @return the saved, routed document
     * @see org.kuali.rice.krad.service.DocumentService#routeDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String, java.util.List)
     */
    @Override
    public Document routeDocument(Document document, String annotation,
            List<AdHocRouteRecipient> adHocRecipients) throws ValidationException, WorkflowException {
        checkForNulls(document);
        //if (!getDocumentActionFlags(document).getCanRoute()) {
        //    throw buildAuthorizationException("route", document);
        //}
        document.prepareForSave();
        Document savedDocument = validateAndPersistDocument(document, new RouteDocumentEvent(document));
        prepareWorkflowDocument(savedDocument);
        getWorkflowDocumentService()
                .route(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                savedDocument.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(savedDocument);
        return savedDocument;
    }
    /**
     * Validates, persists, and approves the document: the document is saved with an
     * {@link ApproveDocumentEvent}, approved through workflow with the given annotation
     * and ad hoc recipients, registered with the user session, and its ad hoc recipients
     * are then cleared.
     *
     * @return the saved, approved document
     * @see org.kuali.rice.krad.service.DocumentService#approveDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String,
     *      java.util.List)
     */
    @Override
    public Document approveDocument(Document document, String annotation,
            List<AdHocRouteRecipient> adHocRecipients) throws ValidationException, WorkflowException {
        checkForNulls(document);
        //if (!getDocumentActionFlags(document).getCanApprove()) {
        //    throw buildAuthorizationException("approve", document);
        //}
        document.prepareForSave();
        Document savedDocument = validateAndPersistDocument(document, new ApproveDocumentEvent(document));
        prepareWorkflowDocument(savedDocument);
        getWorkflowDocumentService()
                .approve(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                savedDocument.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(savedDocument);
        return savedDocument;
    }
    /**
     * Persists the document and performs a super-user approval through workflow with the
     * given annotation. Note: no business-rule validation is run on this path.
     *
     * @return the approved document
     * @see org.kuali.rice.krad.service.DocumentService#superUserApproveDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String)
     */
    @Override
    public Document superUserApproveDocument(Document document, String annotation) throws WorkflowException {
        getLegacyDataAdapter().saveDocument(document);
        prepareWorkflowDocument(document);
        getWorkflowDocumentService().superUserApprove(document.getDocumentHeader().getWorkflowDocument(), annotation);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Persists the document and performs a super-user cancel through workflow with the
     * given annotation. Note: no business-rule validation is run on this path.
     *
     * @return the cancelled document
     * @see org.kuali.rice.krad.service.DocumentService#superUserCancelDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String)
     */
    @Override
    public Document superUserCancelDocument(Document document, String annotation) throws WorkflowException {
        getLegacyDataAdapter().saveDocument(document);
        prepareWorkflowDocument(document);
        getWorkflowDocumentService().superUserCancel(document.getDocumentHeader().getWorkflowDocument(), annotation);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Persists the document, then performs a super-user disapproval through workflow.
     *
     * @return the disapproved document
     * @see org.kuali.rice.krad.service.DocumentService#superUserDisapproveDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String)
     */
    @Override
    public Document superUserDisapproveDocument(Document document, String annotation) throws WorkflowException {
        getLegacyDataAdapter().saveDocument(document);
        return superUserDisapproveDocumentWithoutSaving(document, annotation);
    }
    /**
     * Performs a super-user disapproval through workflow with the given annotation,
     * without persisting the document first.
     *
     * @return the disapproved document
     * @see org.kuali.rice.krad.service.DocumentService#superUserDisapproveDocumentWithoutSaving(org.kuali.rice.krad.document.Document,
     *      java.lang.String)
     */
    @Override
    public Document superUserDisapproveDocumentWithoutSaving(Document document, String annotation) throws WorkflowException {
        prepareWorkflowDocument(document);
        getWorkflowDocumentService()
                .superUserDisapprove(document.getDocumentHeader().getWorkflowDocument(), annotation);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Disapproves the document: records the annotation as a note (linked to the
     * document header when the note type is BUSINESS_OBJECT, since the note target may
     * no longer be reachable), saves the note, and disapproves through workflow.
     *
     * @return the disapproved document
     * @see org.kuali.rice.krad.service.DocumentService#disapproveDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String)
     */
    @Override
    public Document disapproveDocument(Document document, String annotation) throws Exception {
        checkForNulls(document);
        Note note = createNoteFromDocument(document, annotation);
        //if note type is BO, override and link disapprove notes to Doc Header
        if (document.getNoteType().equals(NoteType.BUSINESS_OBJECT)) {
            note.setNoteTypeCode(NoteType.DOCUMENT_HEADER.getCode());
            note.setRemoteObjectIdentifier(document.getDocumentHeader().getObjectId());
        }
        document.addNote(note);
        //SAVE THE NOTE
        //Note: This save logic is replicated here and in KualiDocumentAction, when to save (based on doc state) should be moved
        // into a doc service method
        getNoteService().save(note);
        prepareWorkflowDocument(document);
        getWorkflowDocumentService().disapprove(document.getDocumentHeader().getWorkflowDocument(), annotation);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Cancels the document through workflow with the given annotation. For maintenance
     * documents, the old and new maintainable data objects are refreshed first (when they
     * are BusinessObjects) so stale references are not persisted with the cancel.
     *
     * @return the cancelled document
     * @see org.kuali.rice.krad.service.DocumentService#cancelDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String)
     */
    @Override
    public Document cancelDocument(Document document, String annotation) throws WorkflowException {
        checkForNulls(document);
        //if (!getDocumentActionFlags(document).getCanCancel()) {
        //    throw buildAuthorizationException("cancel", document);
        //}
        if (document instanceof MaintenanceDocument) {
            MaintenanceDocument maintDoc = ((MaintenanceDocument) document);
            if (maintDoc.getOldMaintainableObject() != null &&
                    (maintDoc.getOldMaintainableObject().getDataObject() instanceof BusinessObject)) {
                ((BusinessObject) maintDoc.getOldMaintainableObject().getDataObject()).refresh();
            }
            if (maintDoc.getNewMaintainableObject().getDataObject() instanceof BusinessObject) {
                ((BusinessObject) maintDoc.getNewMaintainableObject().getDataObject()).refresh();
            }
        }
        prepareWorkflowDocument(document);
        getWorkflowDocumentService().cancel(document.getDocumentHeader().getWorkflowDocument(), annotation);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Recalls the document from routing, recording the annotation as a saved note.
     *
     * @param document the document to recall
     * @param annotation the recall annotation, also saved as a note on the document
     * @param cancel true to recall-and-cancel, false to recall to the action list
     * @return the recalled document
     * @throws WorkflowException if the workflow recall fails
     */
    @Override
    public Document recallDocument(Document document, String annotation, boolean cancel) throws WorkflowException {
        checkForNulls(document);
        Note note = createNoteFromDocument(document, annotation);
        document.addNote(note);
        getNoteService().save(note);
        prepareWorkflowDocument(document);
        getWorkflowDocumentService().recall(document.getDocumentHeader().getWorkflowDocument(), annotation, cancel);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Acknowledges the document through workflow with the given annotation and ad hoc
     * recipients. Note: the document is not re-validated or persisted on this path.
     *
     * @return the acknowledged document
     * @see org.kuali.rice.krad.service.DocumentService#acknowledgeDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String,
     *      java.util.List)
     */
    @Override
    public Document acknowledgeDocument(Document document, String annotation,
            List<AdHocRouteRecipient> adHocRecipients) throws WorkflowException {
        checkForNulls(document);
        //if (!getDocumentActionFlags(document).getCanAcknowledge()) {
        //    throw buildAuthorizationException("acknowledge", document);
        //}
        prepareWorkflowDocument(document);
        getWorkflowDocumentService()
                .acknowledge(document.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Validates, persists, and blanket-approves the document: the document is saved with
     * a {@link BlanketApproveDocumentEvent}, blanket-approved through workflow with the
     * given annotation and ad hoc recipients, registered with the user session, and its
     * ad hoc recipients are then cleared.
     *
     * @return the saved, blanket-approved document
     * @see org.kuali.rice.krad.service.DocumentService#blanketApproveDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String,
     *      java.util.List)
     */
    @Override
    public Document blanketApproveDocument(Document document, String annotation,
            List<AdHocRouteRecipient> adHocRecipients) throws ValidationException, WorkflowException {
        checkForNulls(document);
        //if (!getDocumentActionFlags(document).getCanBlanketApprove()) {
        //    throw buildAuthorizationException("blanket approve", document);
        //}
        document.prepareForSave();
        Document savedDocument = validateAndPersistDocument(document, new BlanketApproveDocumentEvent(document));
        prepareWorkflowDocument(savedDocument);
        getWorkflowDocumentService()
                .blanketApprove(savedDocument.getDocumentHeader().getWorkflowDocument(), annotation, adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                savedDocument.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(savedDocument);
        return savedDocument;
    }
    /**
     * Clears FYI requests on the document through workflow for the given ad hoc
     * recipients, after populating the document content for routing.
     *
     * @return the document with its FYIs cleared and ad hoc recipients removed
     * @see org.kuali.rice.krad.service.DocumentService#clearDocumentFyi(org.kuali.rice.krad.document.Document,
     *      java.util.List)
     */
    @Override
    public Document clearDocumentFyi(Document document,
            List<AdHocRouteRecipient> adHocRecipients) throws WorkflowException {
        checkForNulls(document);
        // populate document content so searchable attributes will be indexed properly
        document.populateDocumentForRouting();
        getWorkflowDocumentService().clearFyi(document.getDocumentHeader().getWorkflowDocument(), adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Validates and persists the document with a {@link CompleteDocumentEvent}, then
     * completes it through workflow with the given annotation and ad hoc recipients.
     *
     * <p>NOTE(review): the raw {@code List} parameter matches the interface signature and
     * cannot be generified here without breaking the override.</p>
     *
     * @return the completed document
     * @see org.kuali.rice.krad.service.DocumentService#completeDocument(org.kuali.rice.krad.document.Document,
     *      java.lang.String,
     *      java.util.List)
     */
    @Override
    public Document completeDocument(Document document, String annotation,
            List adHocRecipients) throws WorkflowException {
        checkForNulls(document);
        document.prepareForSave();
        validateAndPersistDocument(document, new CompleteDocumentEvent(document));
        prepareWorkflowDocument(document);
        getWorkflowDocumentService().complete(document.getDocumentHeader().getWorkflowDocument(), annotation,
                adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        removeAdHocPersonsAndWorkgroups(document);
        return document;
    }
    /**
     * Guards against documents that cannot be processed.
     *
     * @param document the document to verify
     * @throws IllegalArgumentException if the document is null
     * @throws IllegalStateException if the document has not been assigned a document number
     */
    protected void checkForNulls(Document document) {
        if (document == null) {
            throw new IllegalArgumentException("invalid (null) document");
        }
        if (document.getDocumentNumber() == null) {
            throw new IllegalStateException("invalid (null) documentHeaderId");
        }
    }
private Document validateAndPersistDocumentAndSaveAdHocRoutingRecipients(Document document,
KualiDocumentEvent event) {
/*
* Using this method to wrap validateAndPersistDocument to keep everything in one transaction. This avoids modifying the
* signature on validateAndPersistDocument method
*/
List<AdHocRouteRecipient> adHocRoutingRecipients = new ArrayList<AdHocRouteRecipient>();
adHocRoutingRecipients.addAll(document.getAdHocRoutePersons());
adHocRoutingRecipients.addAll(document.getAdHocRouteWorkgroups());
documentAdHocService.replaceAdHocsForDocument( document.getDocumentNumber(), adHocRoutingRecipients );
return validateAndPersistDocument(document, event);
}
    /**
     * Checks whether a document exists, both as a workflow document and as a persisted document
     * header. May be called without an established user session; a temporary system-user session is
     * set up and torn down in that case.
     *
     * @see org.kuali.rice.krad.service.DocumentService#documentExists(java.lang.String)
     */
    @Override
    public boolean documentExists(String documentHeaderId) {
        // validate parameters
        if (StringUtils.isBlank(documentHeaderId)) {
            throw new IllegalArgumentException("invalid (blank) documentHeaderId");
        }
        boolean internalUserSession = false;
        try {
            // KFSMI-2543 - allowed method to run without a user session so it can be used
            // by workflow processes
            if (GlobalVariables.getUserSession() == null) {
                internalUserSession = true;
                GlobalVariables.setUserSession(new UserSession(KRADConstants.SYSTEM_USER));
                GlobalVariables.clear();
            }
            // look for workflowDocumentHeader, since that supposedly won't break the transaction
            if (getWorkflowDocumentService().workflowDocumentExists(documentHeaderId)) {
                // look for docHeaderId, since that fails without breaking the transaction
                return documentHeaderService.getDocumentHeaderById(documentHeaderId) != null;
            }
            return false;
        } finally {
            // if a user session was established for this call, clear it out
            if (internalUserSession) {
                GlobalVariables.clear();
                GlobalVariables.setUserSession(null);
            }
        }
    }
/**
* Creates a new document by class.
*
* @see org.kuali.rice.krad.service.DocumentService#getNewDocument(java.lang.Class)
*/
@Override
public Document getNewDocument(Class<? extends Document> documentClass) throws WorkflowException {
if (documentClass == null) {
throw new IllegalArgumentException("invalid (null) documentClass");
}
if (!Document.class.isAssignableFrom(documentClass)) {
throw new IllegalArgumentException("invalid (non-Document) documentClass");
}
String documentTypeName = getDataDictionaryService().getDocumentTypeNameByClass(documentClass);
if (StringUtils.isBlank(documentTypeName)) {
throw new UnknownDocumentTypeException(
"unable to get documentTypeName for unknown documentClass '" + documentClass.getName() + "'");
}
return getNewDocument(documentTypeName);
}
/**
* Creates a new document by document type name. The principal name
* passed in will be used as the document initiator. If the initiatorPrincipalNm
* is null or blank, the current user will be used.
*
* @see org.kuali.rice.krad.service.DocumentService#getNewDocument(String, String)
*/
@Override
public Document getNewDocument(String documentTypeName, String initiatorPrincipalNm) throws WorkflowException {
// argument validation
String watchName = "DocumentServiceImpl.getNewDocument";
StopWatch watch = new StopWatch();
watch.start();
if (LOG.isDebugEnabled()) {
LOG.debug(watchName + ": started");
}
if (StringUtils.isBlank(documentTypeName)) {
throw new IllegalArgumentException("invalid (blank) documentTypeName");
}
if (GlobalVariables.getUserSession() == null) {
throw new IllegalStateException(
"GlobalVariables must be populated with a valid UserSession before a new document can be created");
}
// get the class for this docTypeName
Class<? extends Document> documentClass = getDocumentClassByTypeName(documentTypeName);
// get the initiator
Person initiator = null;
if (StringUtils.isBlank(initiatorPrincipalNm)) {
initiator = GlobalVariables.getUserSession().getPerson();
} else {
initiator = KimApiServiceLocator.getPersonService().getPersonByPrincipalName(initiatorPrincipalNm);
if (initiator == null) {
initiator = GlobalVariables.getUserSession().getPerson();
}
}
// get the authorization
DocumentAuthorizer documentAuthorizer = getDocumentDictionaryService().getDocumentAuthorizer(documentTypeName);
DocumentPresentationController documentPresentationController =
getDocumentDictionaryService().getDocumentPresentationController(documentTypeName);
// make sure this person is authorized to initiate
if ( LOG.isDebugEnabled() ) {
LOG.debug("calling canInitiate from getNewDocument(" + documentTypeName + "," + initiatorPrincipalNm + ")");
}
if (!documentPresentationController.canInitiate(documentTypeName) ||
!documentAuthorizer.canInitiate(documentTypeName, initiator)) {
throw new DocumentAuthorizationException(initiator.getPrincipalName(), "initiate", documentTypeName);
}
// initiate new workflow entry, get the workflow doc
WorkflowDocument workflowDocument = getWorkflowDocumentService().createWorkflowDocument(documentTypeName, initiator);
UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(), workflowDocument);
// create a new document header object
DocumentHeader documentHeader = new DocumentHeader();
documentHeader.setWorkflowDocument(workflowDocument);
documentHeader.setDocumentNumber(workflowDocument.getDocumentId());
// build Document of specified type
Document document = null;
try {
// all maintenance documents have same class
if (MaintenanceDocumentBase.class.isAssignableFrom(documentClass)) {
Class<?>[] defaultConstructor = new Class[]{String.class};
Constructor<? extends Document> cons = documentClass.getConstructor(defaultConstructor);
if (cons == null) {
throw new ConfigurationException(
"Could not find constructor with document type name parameter needed for Maintenance Document Base class");
}
document = cons.newInstance(documentTypeName);
} else {
// non-maintenance document
document = documentClass.newInstance();
}
} catch (IllegalAccessException e) {
throw new RuntimeException("Error instantiating Document", e);
} catch (InstantiationException e) {
throw new RuntimeException("Error instantiating Document", e);
} catch (SecurityException e) {
throw new RuntimeException("Error instantiating Maintenance Document", e);
} catch (NoSuchMethodException e) {
throw new RuntimeException(
"Error instantiating Maintenance Document: No constructor with String parameter found", e);
} catch (IllegalArgumentException e) {
throw new RuntimeException("Error instantiating Maintenance Document", e);
} catch (InvocationTargetException e) {
throw new RuntimeException("Error instantiating Maintenance Document", e);
}
document.setDocumentHeader(documentHeader);
document.setDocumentNumber(documentHeader.getDocumentNumber());
watch.stop();
if (LOG.isDebugEnabled()) {
LOG.debug(watchName + ": " + watch.toString());
}
return document;
}
    /**
     * Creates a new document by document type name, with the current session user as initiator.
     *
     * @see org.kuali.rice.krad.service.DocumentService#getNewDocument(java.lang.String)
     */
    @Override
    public Document getNewDocument(String documentTypeName) throws WorkflowException {
        // a null initiator principal name means "use the current user"
        return getNewDocument(documentTypeName, null);
    }
    /**
     * This is temporary until workflow 2.0 and reads from a table to get documents whose status has changed to A
     * (approved - no
     * outstanding approval actions requested)
     *
     * <p>May be called without an established user session; a temporary system-user session is set
     * up and torn down in that case.</p>
     *
     * @param documentHeaderId the id of the document to load
     * @return Document the loaded and post-processed document (null if no document row exists)
     * @throws WorkflowException
     */
    @Override
    public Document getByDocumentHeaderId(String documentHeaderId) throws WorkflowException {
        if (documentHeaderId == null) {
            throw new IllegalArgumentException("invalid (null) documentHeaderId");
        }
        boolean internalUserSession = false;
        try {
            // KFSMI-2543 - allowed method to run without a user session so it can be used
            // by workflow processes
            if (GlobalVariables.getUserSession() == null) {
                internalUserSession = true;
                GlobalVariables.setUserSession(new UserSession(KRADConstants.SYSTEM_USER));
                GlobalVariables.clear();
            }
            WorkflowDocument workflowDocument = null;
            if (LOG.isDebugEnabled()) {
                LOG.debug("Retrieving doc id: " + documentHeaderId + " from workflow service.");
            }
            workflowDocument = getWorkflowDocumentService()
                    .loadWorkflowDocument(documentHeaderId, GlobalVariables.getUserSession().getPerson());
            UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(), workflowDocument);
            Class<? extends Document> documentClass = getDocumentClassByTypeName(workflowDocument.getDocumentTypeName());
            // retrieve the Document
            Document document = getLegacyDataAdapter().findByDocumentHeaderId(documentClass, documentHeaderId);
            return postProcessDocument(documentHeaderId, workflowDocument, document);
        } finally {
            // if a user session was established for this call, clear it out
            if (internalUserSession) {
                GlobalVariables.clear();
                GlobalVariables.setUserSession(null);
            }
        }
    }
/**
* @see org.kuali.rice.krad.service.DocumentService#getByDocumentHeaderIdSessionless(java.lang.String)
*/
@Override
public Document getByDocumentHeaderIdSessionless(String documentHeaderId) throws WorkflowException {
if (documentHeaderId == null) {
throw new IllegalArgumentException("invalid (null) documentHeaderId");
}
WorkflowDocument workflowDocument = null;
if (LOG.isDebugEnabled()) {
LOG.debug("Retrieving doc id: " + documentHeaderId + " from workflow service.");
}
Person person = getPersonService().getPersonByPrincipalName(KRADConstants.SYSTEM_USER);
workflowDocument = workflowDocumentService.loadWorkflowDocument(documentHeaderId, person);
Class<? extends Document> documentClass = getDocumentClassByTypeName(workflowDocument.getDocumentTypeName());
// retrieve the Document
Document document = getLegacyDataAdapter().findByDocumentHeaderId(documentClass, documentHeaderId);
return postProcessDocument(documentHeaderId, workflowDocument, document);
}
private Class<? extends Document> getDocumentClassByTypeName(String documentTypeName) {
if (StringUtils.isBlank(documentTypeName)) {
throw new IllegalArgumentException("invalid (blank) documentTypeName");
}
Class<? extends Document> clazz = getDataDictionaryService().getDocumentClassByTypeName(documentTypeName);
if (clazz == null) {
throw new UnknownDocumentTypeException(
"unable to get class for unknown documentTypeName '" + documentTypeName + "'");
}
return clazz;
}
/**
* Loads the Notes for the note target on this Document.
*
* @param document the document for which to load the notes
*/
protected void loadNotes(final Document document) {
if (isNoteTargetReady(document)) {
Object legacyObjectClass;
if (document instanceof MaintenanceDocument) {
MaintenanceDocument mdoc = (MaintenanceDocument) document;
legacyObjectClass = ((Maintainable) org.apache.commons.lang.ObjectUtils.defaultIfNull(mdoc.getOldMaintainableObject(), mdoc.getNewMaintainableObject())).getDataObjectClass();
} else {
legacyObjectClass = document.getClass();
}
List<Note> notes = new ArrayList<Note>();
if (StringUtils.isNotBlank(document.getNoteTarget().getObjectId())) {
notes.addAll(getNoteService().getByRemoteObjectId(document.getNoteTarget().getObjectId()));
}
//notes created on 'disapprove' are linked to Doc Header, so this checks that even if notetype = BO
if (document.getNoteType().equals(NoteType.BUSINESS_OBJECT) && document.getDocumentHeader()
.getWorkflowDocument().isDisapproved()) {
notes.addAll(getNoteService().getByRemoteObjectId(document.getDocumentHeader().getObjectId()));
}
document.setNotes(notes);
}
}
/**
* Performs required post-processing for every document from the documentDao
*
* @param documentHeaderId
* @param workflowDocument
* @param document
*/
private Document postProcessDocument(String documentHeaderId, WorkflowDocument workflowDocument, Document document) {
if (document != null) {
document.getDocumentHeader().setWorkflowDocument(workflowDocument);
document.processAfterRetrieve();
loadNotes(document);
}
return document;
}
    /**
     * The default implementation - this retrieves all documents by a list of documentHeader for a given class.
     *
     * <p>May be called without an established user session; a temporary system-user session is set
     * up and torn down in that case.</p>
     *
     * @see org.kuali.rice.krad.service.DocumentService#getDocumentsByListOfDocumentHeaderIds(java.lang.Class,
     *      java.util.List)
     */
    @Override
    public List<Document> getDocumentsByListOfDocumentHeaderIds(Class<? extends Document> documentClass,
            List<String> documentHeaderIds) throws WorkflowException {
        // validate documentHeaderIdList and contents
        if (documentHeaderIds == null) {
            throw new IllegalArgumentException("invalid (null) documentHeaderId list");
        }
        int index = 0;
        for (String documentHeaderId : documentHeaderIds) {
            if (StringUtils.isBlank(documentHeaderId)) {
                throw new IllegalArgumentException("invalid (blank) documentHeaderId at list index " + index);
            }
            index++;
        }
        boolean internalUserSession = false;
        try {
            // KFSMI-2543 - allowed method to run without a user session so it can be used
            // by workflow processes
            if (GlobalVariables.getUserSession() == null) {
                internalUserSession = true;
                GlobalVariables.setUserSession(new UserSession(KRADConstants.SYSTEM_USER));
                GlobalVariables.clear();
            }
            // retrieve all documents that match the document header ids
            List<? extends Document> rawDocuments = getLegacyDataAdapter().findByDocumentHeaderIds(documentClass,
                    documentHeaderIds);
            // post-process them: reattach workflow documents and load notes
            List<Document> documents = new ArrayList<Document>();
            for (Document document : rawDocuments) {
                WorkflowDocument workflowDocument = getWorkflowDocumentService().loadWorkflowDocument(document.getDocumentNumber(), GlobalVariables.getUserSession().getPerson());
                document = postProcessDocument(document.getDocumentNumber(), workflowDocument, document);
                documents.add(document);
            }
            return documents;
        } finally {
            // if a user session was established for this call, clear it out
            if (internalUserSession) {
                GlobalVariables.clear();
                GlobalVariables.setUserSession(null);
            }
        }
    }
/* Helper Methods */
/**
* Validates and persists a document.
*/
@Override
public Document validateAndPersistDocument(Document document, KualiDocumentEvent event) throws ValidationException {
if (document == null) {
LOG.error("document passed to validateAndPersist was null");
throw new IllegalArgumentException("invalid (null) document");
}
if (LOG.isDebugEnabled()) {
LOG.debug("validating and preparing to persist document " + document.getDocumentNumber());
}
document.validateBusinessRules(event);
document.prepareForSave(event);
// save the document
Document savedDocument = null;
try {
if (LOG.isInfoEnabled()) {
LOG.info("storing document " + document.getDocumentNumber());
}
savedDocument = getLegacyDataAdapter().saveDocument(document);
} catch (OptimisticLockingFailureException e) {
LOG.error("exception encountered on store of document " + e.getMessage());
throw e;
}
boolean notesSaved = saveDocumentNotes(savedDocument);
if (!notesSaved) {
if (LOG.isInfoEnabled()) {
LOG.info(
"Notes not saved during validateAndPersistDocument, likely means that note save needs to be deferred because note target is not ready.");
}
}
savedDocument.postProcessSave(event);
return savedDocument;
}
    /**
     * Sets the title and app document id in the flex document
     *
     * @param document the document whose workflow document should be prepared for routing
     * @throws org.kuali.rice.kew.api.exception.WorkflowException
     */
    @Override
    public void prepareWorkflowDocument(Document document) throws WorkflowException {
        // populate document content so searchable attributes will be indexed properly
        document.populateDocumentForRouting();
        // make sure we push the document title into the workflowDocument
        populateDocumentTitle(document);
        // make sure we push the application document id into the workflowDocument
        populateApplicationDocumentId(document);
    }
/**
* This method will grab the generated document title from the document and add it to the workflowDocument so that
* it gets pushed into
* workflow when routed.
*
* @param document
* @throws org.kuali.rice.kew.api.exception.WorkflowException
*/
private void populateDocumentTitle(Document document) throws WorkflowException {
String documentTitle = document.getDocumentTitle();
if (StringUtils.isNotBlank(documentTitle)) {
document.getDocumentHeader().getWorkflowDocument().setTitle(documentTitle);
}
}
/**
* This method will grab the organization document number from the document and add it to the workflowDocument so
* that it gets pushed
* into workflow when routed.
*
* @param document
*/
private void populateApplicationDocumentId(Document document) {
String organizationDocumentNumber = document.getDocumentHeader().getOrganizationDocumentNumber();
if (StringUtils.isNotBlank(organizationDocumentNumber)) {
document.getDocumentHeader().getWorkflowDocument().setApplicationDocumentId(organizationDocumentNumber);
}
}
    /**
     * This is to allow for updates of document statuses and other related requirements for updates outside of the
     * initial save and
     * route
     *
     * @param document the document to persist; must not be null and must have a document number
     * @return the persisted document instance (may differ from the one passed in)
     */
    @Override
    public Document updateDocument(Document document) {
        checkForNulls(document);
        return getLegacyDataAdapter().saveDocument(document);
    }
/**
* @see org.kuali.rice.krad.service.DocumentService#createNoteFromDocument(org.kuali.rice.krad.document.Document,
* java.lang.String)
*/
@Override
public Note createNoteFromDocument(Document document, String text) {
Note note = new Note();
note.setNotePostedTimestamp(getDateTimeService().getCurrentTimestamp());
note.setVersionNumber(Long.valueOf(1));
note.setNoteText(text);
note.setNoteTypeCode(document.getNoteType().getCode());
PersistableBusinessObject bo = document.getNoteTarget();
// TODO gah! this is awful
Person kualiUser = GlobalVariables.getUserSession().getPerson();
if (kualiUser == null) {
throw new IllegalStateException("Current UserSession has a null Person.");
}
return bo == null ? null : getNoteService().createNote(note, bo, kualiUser.getPrincipalId());
}
/**
* @see org.kuali.rice.krad.service.DocumentService#saveDocumentNotes(org.kuali.rice.krad.document.Document)
*/
@Override
public boolean saveDocumentNotes(Document document) {
if (isNoteTargetReady(document)) {
List<Note> notes = document.getNotes();
for (Note note : document.getNotes()) {
linkNoteRemoteObjectId(note, document.getNoteTarget());
}
getNoteService().saveNoteList(notes);
return true;
}
return false;
}
/**
* @see org.kuali.rice.krad.service.DocumentService
*/
@Override
public void sendNoteRouteNotification(Document document, Note note, Person sender) throws WorkflowException {
AdHocRouteRecipient routeRecipient = note.getAdHocRouteRecipient();
// build notification request
Person requestedUser = this.getPersonService().getPersonByPrincipalName(routeRecipient.getId());
String senderName = sender.getFirstName() + " " + sender.getLastName();
String requestedName = requestedUser.getFirstName() + " " + requestedUser.getLastName();
String notificationText =
kualiConfigurationService.getPropertyValueAsString(
RiceKeyConstants.MESSAGE_NOTE_NOTIFICATION_ANNOTATION);
if (StringUtils.isBlank(notificationText)) {
throw new RuntimeException(
"No annotation message found for note notification. Message needs added to application resources with key:" +
RiceKeyConstants.MESSAGE_NOTE_NOTIFICATION_ANNOTATION);
}
notificationText =
MessageFormat.format(notificationText, new Object[]{senderName, requestedName, note.getNoteText()});
List<AdHocRouteRecipient> routeRecipients = new ArrayList<AdHocRouteRecipient>();
routeRecipients.add(routeRecipient);
workflowDocumentService
.sendWorkflowNotification(document.getDocumentHeader().getWorkflowDocument(), notificationText,
routeRecipients, KRADConstants.NOTE_WORKFLOW_NOTIFICATION_REQUEST_LABEL);
// clear recipient allowing an notification to be sent to another person
note.setAdHocRouteRecipient(new AdHocRoutePerson());
}
/**
* Determines if the given document's note target is ready for notes to be
* attached and persisted against it. This method verifies that the document's
* note target is non-null as well as checking that it has a non-empty object id.
*
* @param document the document on which to check for note target readiness
* @return true if the note target is ready, false otherwise
*/
protected boolean isNoteTargetReady(Document document) {
//special case for disappoved documents
if (document.getDocumentHeader().getWorkflowDocument().isDisapproved()) {
return true;
}
PersistableBusinessObject noteTarget = document.getNoteTarget();
if (noteTarget == null || StringUtils.isBlank(noteTarget.getObjectId())) {
return false;
}
return true;
}
private void linkNoteRemoteObjectId(Note note, PersistableBusinessObject noteTarget) {
String objectId = noteTarget.getObjectId();
if (StringUtils.isBlank(objectId)) {
throw new IllegalStateException(
"Attempted to link a Note with a PersistableBusinessObject with no object id");
}
note.setRemoteObjectIdentifier(noteTarget.getObjectId());
}
    /**
     * Sends ad hoc workflow notifications for the document to the given recipients, then clears the
     * recipients from the document.
     *
     * @see org.kuali.rice.krad.service.DocumentService#sendAdHocRequests(org.kuali.rice.krad.document.Document, String, java.util.List)
     */
    @Override
    public void sendAdHocRequests(Document document, String annotation,
            List<AdHocRouteRecipient> adHocRecipients) throws WorkflowException {
        // push title and application document id into the workflow document before notifying
        prepareWorkflowDocument(document);
        getWorkflowDocumentService()
                .sendWorkflowNotification(document.getDocumentHeader().getWorkflowDocument(), annotation,
                        adHocRecipients);
        UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(),
                document.getDocumentHeader().getWorkflowDocument());
        // ad hoc recipients are one-shot; clear them after the notification is sent
        removeAdHocPersonsAndWorkgroups(document);
    }
    /**
     * Clears all ad hoc routing recipients from the document, both in persistent storage and on the
     * document instance itself.
     *
     * @param document the document whose ad hoc recipients should be removed
     */
    private void removeAdHocPersonsAndWorkgroups(Document document) {
        // replacing with null clears any persisted ad hoc entries for this document
        documentAdHocService.replaceAdHocsForDocument(document.getDocumentNumber(), null);
        document.setAdHocRoutePersons(new ArrayList<AdHocRoutePerson>());
        document.setAdHocRouteWorkgroups(new ArrayList<AdHocRouteWorkgroup>());
    }
    // --- service wiring: setters for dependency injection; getters lazily locate the
    // --- service through the appropriate service locator when it was not injected
    @Required
    public void setDateTimeService(DateTimeService dateTimeService) {
        this.dateTimeService = dateTimeService;
    }
    protected DateTimeService getDateTimeService() {
        // fall back to the service locator when not injected
        if (this.dateTimeService == null) {
            this.dateTimeService = CoreApiServiceLocator.getDateTimeService();
        }
        return this.dateTimeService;
    }
    @Required
    public void setNoteService(NoteService noteService) {
        this.noteService = noteService;
    }
    protected NoteService getNoteService() {
        // fall back to the service locator when not injected
        if (this.noteService == null) {
            this.noteService = KRADServiceLocator.getNoteService();
        }
        return this.noteService;
    }
    public void setLegacyDataAdapter(LegacyDataAdapter legacyDataAdapter) {
        this.legacyDataAdapter = legacyDataAdapter;
    }
    protected LegacyDataAdapter getLegacyDataAdapter() {
        // no locator fallback here: the adapter must be injected
        return this.legacyDataAdapter;
    }
    public void setWorkflowDocumentService(WorkflowDocumentService workflowDocumentService) {
        this.workflowDocumentService = workflowDocumentService;
    }
    protected WorkflowDocumentService getWorkflowDocumentService() {
        // fall back to the service locator when not injected
        if (this.workflowDocumentService == null) {
            this.workflowDocumentService = KRADServiceLocatorWeb.getWorkflowDocumentService();
        }
        return this.workflowDocumentService;
    }
    @Required
    public void setDataDictionaryService(DataDictionaryService dataDictionaryService) {
        this.dataDictionaryService = dataDictionaryService;
    }
    protected DataDictionaryService getDataDictionaryService() {
        // fall back to the service locator when not injected
        if (this.dataDictionaryService == null) {
            this.dataDictionaryService = KRADServiceLocatorWeb.getDataDictionaryService();
        }
        return this.dataDictionaryService;
    }
    protected DocumentDictionaryService getDocumentDictionaryService() {
        // fall back to the service locator when not injected
        if (documentDictionaryService == null) {
            documentDictionaryService = KRADServiceLocatorWeb.getDocumentDictionaryService();
        }
        return documentDictionaryService;
    }
    @Required
    public void setDocumentDictionaryService(DocumentDictionaryService documentDictionaryService) {
        this.documentDictionaryService = documentDictionaryService;
    }
    public PersonService getPersonService() {
        // fall back to the KIM service locator when not injected
        if (personService == null) {
            personService = KimApiServiceLocator.getPersonService();
        }
        return personService;
    }
    @Required
    public void setKualiConfigurationService(ConfigurationService kualiConfigurationService) {
        this.kualiConfigurationService = kualiConfigurationService;
    }
    public DocumentHeaderService getDocumentHeaderService() {
        return documentHeaderService;
    }
    @Required
    public void setDocumentHeaderService(DocumentHeaderService documentHeaderService) {
        this.documentHeaderService = documentHeaderService;
    }
    @Required
    public void setDocumentAdHocService(DocumentAdHocService documentAdHocService) {
        this.documentAdHocService = documentAdHocService;
    }
}
| KULRICE-11034 : fixed up a number of places where the saved document object was not being used and the workflow document was not getting copied to the new instance
git-svn-id: 2a5d2b5a02908a0c4ba7967b726d8c4198d1b9ed@42354 7a7aa7f6-c479-11dc-97e2-85a2497f191d
| rice-framework/krad-service-impl/src/main/java/org/kuali/rice/krad/service/impl/DocumentServiceImpl.java | KULRICE-11034 : fixed up a number of places where the saved document object was not being used and the workflow document was not getting copied to the new instance |
|
Java | apache-2.0 | 9d49c06bc3d30be464a2dcee2fb206e455efb34d | 0 | BrunoEberhard/minimal-j,BrunoEberhard/minimal-j,BrunoEberhard/minimal-j | package org.minimalj.frontend.json;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.minimalj.application.Application;
import org.minimalj.application.ApplicationContext;
import org.minimalj.frontend.page.ActionGroup;
import org.minimalj.frontend.page.ObjectPage;
import org.minimalj.frontend.page.Page;
import org.minimalj.frontend.page.SearchPage;
import org.minimalj.frontend.toolkit.Action;
import org.minimalj.util.StringUtils;
import org.minimalj.util.resources.Resources;
/**
 * Server side state of one JSON client: the currently visible page, the components of that page
 * addressable by id, and the output being assembled for the current request.
 */
public class JsonClientSession {

	// All known sessions keyed by their random id.
	// NOTE(review): entries are never removed, so this map grows for the lifetime
	// of the VM — consider expiring idle sessions.
	private static final Map<String, JsonClientSession> sessions = new HashMap<>();

	private final ApplicationContext applicationContext;

	// the page currently displayed and the id it is registered under
	private Page visiblePage;
	private String visiblePageId;

	// components of the visible page / open dialogs, addressable by id from the client
	private Map<String, JsonComponent> componentById = new HashMap<>(100);

	// pages shown in this session so the client can navigate to them by id
	private Map<String, Page> pageById = new HashMap<>();

	// output collected while handling the current input; recreated per request
	private JsonOutput output;

	public JsonClientSession(ApplicationContext context) {
		this.applicationContext = context;
	}

	public static JsonClientSession getSession(String sessionId) {
		return sessions.get(sessionId);
	}

	/**
	 * Creates and registers a new session.
	 *
	 * @return the randomly generated id of the new session
	 */
	public static String createSession() {
		String sessionId = UUID.randomUUID().toString();
		// NOTE(review): the session is created without an ApplicationContext — confirm intended
		JsonClientSession session = new JsonClientSession(null);
		sessions.put(sessionId, session);
		return sessionId;
	}

	/**
	 * Handles one client request: applies value changes, executes actions and page navigation, and
	 * returns the accumulated output for the client.
	 *
	 * @param input the parsed client request
	 * @return the output to send back to the client
	 */
	public JsonOutput handle(JsonInput input) {
		JsonClientToolkit.setSession(this);
		output = new JsonOutput();

		if (input.containsObject(JsonInput.SHOW_PAGE)) {
			String pageId = (String) input.getObject(JsonInput.SHOW_PAGE);
			Page page;
			if (pageId != null) {
				page = pageById.get(pageId);
			} else {
				// no id means "go to the application's default page"
				page = Application.getApplication().createDefaultPage();
			}
			showPage(page, pageId);
		}

		// push changed input values into their components
		Map<String, Object> changedValue = input.get(JsonInput.CHANGED_VALUE);
		for (Map.Entry<String, Object> entry : changedValue.entrySet()) {
			String componentId = entry.getKey();
			String newValue = (String) entry.getValue();
			JsonComponent component = componentById.get(componentId);
			((JsonInputComponent) component).setValue(newValue);
		}

		String actionId = (String) input.getObject(JsonInput.ACTIVATED_ACTION);
		if (actionId != null) {
			JsonAction action = (JsonAction) componentById.get(actionId);
			action.action();
		}

		// a row was activated in a table
		Map<String, Object> tableAction = input.get(JsonInput.TABLE_ACTION);
		if (tableAction != null && !tableAction.isEmpty()) {
			JsonTable<?> table = (JsonTable<?>) componentById.get(tableAction.get("table"));
			int row = ((Long) tableAction.get("row")).intValue();
			table.action(row);
		}

		String search = (String) input.getObject("search");
		if (search != null) {
			// NOTE(review): always uses the first configured search page — confirm intended
			SearchPage searchPage = Application.getApplication().getSearchPages()[0];
			searchPage.setQuery(search);
			showPage(searchPage);
		}

		JsonClientToolkit.setSession(null);
		return output;
	}

	public void showPage(Page page) {
		showPage(page, null);
	}

	/**
	 * Makes the given page the visible page of this session, registering its content and menu
	 * components and assigning a page id when none is given.
	 *
	 * @param page the page to display
	 * @param pageId the id to register the page under, or null to generate a fresh one
	 */
	public void showPage(Page page, String pageId) {
		// components of the previous page are no longer addressable
		componentById.clear();

		JsonComponent content = (JsonComponent) page.getContent();
		registerId(content);
		output.add("content", content);

		Object menu = createMenu(page);
		registerId(menu);
		output.add("menu", menu);

		if (pageId == null) {
			pageId = UUID.randomUUID().toString();
		}
		pageById.put(pageId, page);
		output.add("pageId", pageId);

		this.visiblePage = page;
		this.visiblePageId = pageId;
	}

	/** Re-renders the currently visible page under its existing page id. */
	public void refresh() {
		showPage(visiblePage, visiblePageId);
	}

	/**
	 * Builds the menu for the given page: the file menu plus, for object pages, the object menu.
	 */
	private List<Object> createMenu(Page page) {
		List<Object> items = new ArrayList<>();
		items.add(createFileMenu());
		Object objectMenu = createObjectMenu(page);
		if (objectMenu != null) {
			items.add(objectMenu);
		}
		return items;
	}

	/**
	 * Builds the file menu from the application's "new", "import" and "export" actions; sections
	 * that have no actions are omitted.
	 */
	private Map<String, Object> createFileMenu() {
		Map<String, Object> fileMenu = createMenu("file");
		List<Object> fileItems = new ArrayList<>();
		fileMenu.put("items", fileItems);

		List<Action> actionsNew = Application.getApplication().getActionsNew();
		if (!actionsNew.isEmpty()) {
			Map<String, Object> newMenu = createMenu("new");
			fileItems.add(newMenu);
			List<Object> itemsNew = createActions(actionsNew);
			newMenu.put("items", itemsNew);
			fileItems.add("separator");
		}

		List<Action> actionsImport = Application.getApplication().getActionImport();
		if (!actionsImport.isEmpty()) {
			Map<String, Object> importMenu = createMenu("import");
			fileItems.add(importMenu);
			List<Object> itemsImport = createActions(actionsImport);
			importMenu.put("items", itemsImport);
		}

		List<Action> actionsExport = Application.getApplication().getActionExport();
		if (!actionsExport.isEmpty()) {
			Map<String, Object> exportMenu = createMenu("export");
			fileItems.add(exportMenu);
			List<Object> itemsexport = createActions(actionsExport);
			exportMenu.put("items", itemsexport);
		}
		// if (!actionsImport.isEmpty() || !actionsExport.isEmpty()) {
		// fileItems.add("separator");
		// }
		return fileMenu;
	}

	List<Object> createActions(List<Action> actions) {
		List<Object> items = new ArrayList<>();
		for (Action action : actions) {
			items.add(createAction(action));
		}
		return items;
	}

	List<Object> createActions(Action[] actions) {
		return createActions(Arrays.asList(actions));
	}

	/**
	 * Converts an Action into its JSON component; action groups become nested item lists.
	 */
	JsonComponent createAction(Action action) {
		JsonComponent item;
		if (action instanceof ActionGroup) {
			ActionGroup actionGroup = (ActionGroup) action;
			item = new JsonAction.JsonActionGroup();
			item.put("items", createActions(actionGroup.getItems()));
		} else {
			item = new JsonAction(action);
		}
		item.put("name", action.getName());
		return item;
	}

	/**
	 * Recursively registers every JsonComponent reachable from the given object (the object itself,
	 * map values and list elements) so the client can address it by id.
	 */
	public void registerId(Object o) {
		if (o instanceof JsonComponent) {
			JsonComponent component = (JsonComponent) o;
			String id = component.getId();
			if (id != null) {
				componentById.put(id, component);
			}
		}
		if (o instanceof Map) {
			for (Object value : ((Map<?, ?>) o).values()) {
				registerId(value);
			}
		}
		if (o instanceof List) {
			for (Object element : (List<?>) o) {
				registerId(element);
			}
		}
	}

	/**
	 * Builds the object menu for an ObjectPage, or returns null when the page has no menu.
	 */
	private Map<String, Object> createObjectMenu(Page page) {
		if (page instanceof ObjectPage) {
			ActionGroup actionGroup = ((ObjectPage<?>) page).getMenu();
			if (actionGroup != null && actionGroup.getItems() != null) {
				Map<String, Object> objectMenu = createAction(actionGroup);
				objectMenu.put("items", createActions(actionGroup.getItems()));
				return objectMenu;
			}
		}
		return null;
	}

	/**
	 * Builds a menu map with name and optional description resolved from the resource bundle.
	 */
	private Map<String, Object> createMenu(String resourceName) {
		Map<String, Object> menu = new LinkedHashMap<>();
		menu.put("name", Resources.getString("Menu." + resourceName));
		String description = Resources.getString("Menu." + resourceName + ".description");
		if (!StringUtils.isEmpty(description)) {
			menu.put("description", description);
		}
		return menu;
	}

	public void openDialog(JsonDialog jsonDialog) {
		registerId(jsonDialog);
		output.add("dialog", jsonDialog);
	}

	public void closeDialog(String id) {
		output.add("closeDialog", id);
	}

	/**
	 * Replaces the content of a switch component on the client.
	 *
	 * @param switchId the id of the switch component
	 * @param content the new content to display inside it
	 */
	public void switchContent(String switchId, JsonComponent content) {
		registerId(content);
		Map<String, Object> sw = new HashMap<>();
		sw.put("id", switchId);
		sw.put("content", content);
		output.add("switch", sw);
	}

	public void propertyChange(String componentId, String property, Object value) {
		output.propertyChange(componentId, property, value);
	}
}
| src/main/java/org/minimalj/frontend/json/JsonClientSession.java | package org.minimalj.frontend.json;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.minimalj.application.Application;
import org.minimalj.application.ApplicationContext;
import org.minimalj.frontend.page.ActionGroup;
import org.minimalj.frontend.page.ObjectPage;
import org.minimalj.frontend.page.Page;
import org.minimalj.frontend.page.SearchPage;
import org.minimalj.frontend.toolkit.Action;
import org.minimalj.util.StringUtils;
import org.minimalj.util.resources.Resources;
/**
 * Server-side state for one JSON (web) client. A session keeps the mapping
 * from component ids to component objects for the currently visible page,
 * remembers the pages shown so far (by id), and collects the output that is
 * sent back to the client after handling an input message.
 *
 * NOTE(review): sessions are stored in a static map and never removed, so
 * they live for the lifetime of the JVM — confirm whether eviction is
 * handled elsewhere. Not thread-safe per se; presumably one request at a
 * time per session — TODO confirm with the servlet/handler layer.
 */
public class JsonClientSession {
	/** All known sessions, keyed by the random id handed to the client. */
	private static final Map<String, JsonClientSession> sessions = new HashMap<>();

	// NOTE(review): field name is misspelled ("applicaton"); kept as-is.
	// Written in the constructor but never read within this class.
	private final ApplicationContext applicatonContext;
	// NOTE(review): appears to be unused in this class.
	private Page visiblePage;
	// components of the currently shown page / open dialogs, by component id
	private Map<String, JsonComponent> componentById = new HashMap<>(100);
	// every page shown in this session, by the id sent to the client
	private Map<String, Page> pageById = new HashMap<>();
	// output collected while handling the current input message
	private JsonOutput output;

	public JsonClientSession(ApplicationContext context) {
		this.applicatonContext = context;
	}

	/** Returns the session for the given id, or null if unknown. */
	public static JsonClientSession getSession(String sessionId) {
		return sessions.get(sessionId);
	}

	/** Creates and registers a new session and returns its generated id. */
	public static String createSession() {
		String sessionId = UUID.randomUUID().toString();
		JsonClientSession session = new JsonClientSession(null);
		sessions.put(sessionId, session);
		return sessionId;
	}

	/**
	 * Handles one input message from the client and returns the collected
	 * output. Processes, in order: page display requests, changed input
	 * values, an activated action, a table row action, and a search request.
	 *
	 * @param input decoded client message
	 * @return the output to be serialized back to the client
	 */
	public JsonOutput handle(JsonInput input) {
		JsonClientToolkit.setSession(this);
		output = new JsonOutput();
		if (input.containsObject(JsonInput.SHOW_PAGE)) {
			String pageId = (String) input.getObject(JsonInput.SHOW_PAGE);
			Page page;
			if (pageId != null) {
				page = pageById.get(pageId);
			} else {
				// no id means the client asked for the start page
				page = Application.getApplication().createDefaultPage();
			}
			showPage(page, pageId);
		}
		Map<String, Object> changedValue = input.get(JsonInput.CHANGED_VALUE);
		for (Map.Entry<String, Object> entry : changedValue.entrySet()) {
			String componentId = entry.getKey();
			String newValue = (String) entry.getValue();
			JsonComponent component = componentById.get(componentId);
			((JsonInputComponent) component).setValue((String) newValue);
		}
		String actionId = (String) input.getObject(JsonInput.ACTIVATED_ACTION);
		if (actionId != null) {
			JsonAction action = (JsonAction) componentById.get(actionId);
			action.action();
		}
		Map<String, Object> tableAction = input.get(JsonInput.TABLE_ACTION);
		if (tableAction != null && !tableAction.isEmpty()) {
			JsonTable<?> table = (JsonTable<?>) componentById.get(tableAction.get("table"));
			int row = ((Long) tableAction.get("row")).intValue();
			table.action(row);
		}
		String search = (String) input.getObject("search");
		if (search != null) {
			// NOTE(review): only the first search page is used — confirm
			// intended behavior when several search pages exist
			SearchPage searchPage = Application.getApplication().getSearchPages()[0];
			searchPage.setQuery(search);
			showPage(searchPage);
		}
		JsonClientToolkit.setSession(null);
		return output;
	}

	/** Shows the given page under a newly generated page id. */
	public void showPage(Page page) {
		showPage(page, null);
	}

	/**
	 * Shows the given page: clears the previous page's component registry,
	 * registers the new content and menu, and remembers the page under the
	 * given id (a fresh id is generated when {@code pageId} is null).
	 */
	public void showPage(Page page, String pageId) {
		// ids of the previously shown page are no longer valid
		componentById.clear();
		JsonComponent content = (JsonComponent) page.getContent();
		registerId(content);
		output.add("content", content);
		Object menu = createMenu(page);
		registerId(menu);
		output.add("menu", menu);
		if (pageId == null) {
			pageId = UUID.randomUUID().toString();
		}
		pageById.put(pageId, page);
		output.add("pageId", pageId);
	}

	/** Builds the menu bar for the page: file menu plus optional object menu. */
	private List<Object> createMenu(Page page) {
		List<Object> items = new ArrayList<>();
		items.add(createFileMenu());
		Object objectMenu = createObjectMenu(page);
		if (objectMenu != null) {
			items.add(objectMenu);
		}
		return items;
	}

	/** Builds the "file" menu from the application's new/import/export actions. */
	private Map<String, Object> createFileMenu() {
		Map<String, Object> fileMenu = createMenu("file");
		List<Object> fileItems = new ArrayList<>();
		fileMenu.put("items", fileItems);
		List<Action> actionsNew = Application.getApplication().getActionsNew();
		if (!actionsNew.isEmpty()) {
			Map<String, Object> newMenu = createMenu("new");
			fileItems.add(newMenu);
			List<Object> itemsNew = createActions(actionsNew);
			newMenu.put("items", itemsNew);
			fileItems.add("separator");
		}
		List<Action> actionsImport = Application.getApplication().getActionImport();
		if (!actionsImport.isEmpty()) {
			Map<String, Object> importMenu = createMenu("import");
			fileItems.add(importMenu);
			List<Object> itemsImport = createActions(actionsImport);
			importMenu.put("items", itemsImport);
		}
		List<Action> actionsExport = Application.getApplication().getActionExport();
		if (!actionsExport.isEmpty()) {
			Map<String, Object> exportMenu = createMenu("export");
			fileItems.add(exportMenu);
			List<Object> itemsexport = createActions(actionsExport);
			exportMenu.put("items", itemsexport);
		}
		// if (!actionsImport.isEmpty() || !actionsExport.isEmpty()) {
		// fileItems.add("separator");
		// }
		return fileMenu;
	}

	/** Converts the given actions to their JSON representations. */
	List<Object> createActions(List<Action> actions) {
		List<Object> items = new ArrayList<>();
		for (Action action : actions) {
			items.add(createAction(action));
		}
		return items;
	}

	/** Array overload of {@link #createActions(List)}. */
	List<Object> createActions(Action[] actions) {
		return createActions(Arrays.asList(actions));
	}

	/** Converts one action (or an action group with its items) to JSON. */
	JsonComponent createAction(Action action) {
		JsonComponent item;
		if (action instanceof ActionGroup) {
			ActionGroup actionGroup = (ActionGroup) action;
			item = new JsonAction.JsonActionGroup();
			item.put("items", createActions(actionGroup.getItems()));
		} else {
			item = new JsonAction(action);
		}
		item.put("name", action.getName());
		return item;
	}

	/**
	 * Recursively registers every JsonComponent with a non-null id found in
	 * the given object graph (component itself, map values, list elements).
	 */
	public void registerId(Object o) {
		if (o instanceof JsonComponent) {
			JsonComponent component = (JsonComponent) o;
			String id = component.getId();
			if (id != null) {
				componentById.put(component.getId(), component);
			}
		}
		// a component may itself be a Map/List, hence no else-if
		if (o instanceof Map) {
			Map map = (Map) o;
			for (Object o2 : map.values()) {
				registerId(o2);
			}
		}
		if (o instanceof List) {
			List list = (List) o;
			for (Object o2 : list) {
				registerId(o2);
			}
		}
	}

	/** Builds the object menu for an ObjectPage, or returns null if none. */
	private Map<String, Object> createObjectMenu(Page page) {
		if (page instanceof ObjectPage) {
			ActionGroup actionGroup = ((ObjectPage<?>) page).getMenu();
			if (actionGroup != null && actionGroup.getItems() != null) {
				Map<String, Object> objectMenu = createAction(actionGroup);
				objectMenu.put("items", createActions(actionGroup.getItems()));
				return objectMenu;
			}
		}
		return null;
	}

	/** Creates a menu map with localized name and optional description. */
	private Map<String, Object> createMenu(String resourceName) {
		Map<String, Object> menu = new LinkedHashMap<>();
		menu.put("name", Resources.getString("Menu." + resourceName));
		String description = Resources.getString("Menu." + resourceName + ".description");
		if (!StringUtils.isEmpty(description)) {
			menu.put("description", description);
		}
		return menu;
	}

	/** Registers the dialog's components and sends the dialog to the client. */
	public void openDialog(JsonDialog jsonDialog) {
		registerId(jsonDialog);
		output.add("dialog", jsonDialog);
	}

	/** Tells the client to close the dialog with the given id. */
	public void closeDialog(String id) {
		output.add("closeDialog", id);
	}

	/** Forwards a component property change to the client output. */
	public void propertyChange(String componentId, String property, Object value) {
		output.propertyChange(componentId, property, value);
	}
}
| JsonClientToolkit: implemented refresh | src/main/java/org/minimalj/frontend/json/JsonClientSession.java | JsonClientToolkit: implemented refresh |
|
Java | apache-2.0 | d6ea4c1dedb731f19877adbdb825a7637040a949 | 0 | apache/pdfbox,kalaspuffar/pdfbox,kalaspuffar/pdfbox,apache/pdfbox | /*
* Copyright 2014 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.filter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.io.IOUtils;
/**
 * Helper class to contain predictor decoding used by Flate and LZW filter.
 * To see the history, look at the FlateFilter class.
 *
 * Supports the TIFF predictor (value 2) and the PNG row filters
 * None/Sub/Up/Average/Paeth, represented internally as values 10..14.
 */
public final class Predictor
{
    // utility class; not to be instantiated
    private Predictor()
    {
    }

    /**
     * Decodes a single line of data in-place.
     * @param predictor Predictor value for the current line
     * @param colors Number of color components, from decode parameters.
     * @param bitsPerComponent Number of bits per components, from decode parameters.
     * @param columns Number samples in a row, from decode parameters.
     * @param actline Current (active) line to decode. Data will be decoded in-place,
     *                i.e. - the contents of this buffer will be modified.
     * @param lastline The previous decoded line. When decoding the first line, this
     *                 parameter should be an empty byte array of the same length as
     *                 <code>actline</code>.
     */
    static void decodePredictorRow(int predictor, int colors, int bitsPerComponent, int columns, byte[] actline, byte[] lastline)
    {
        if (predictor == 1)
        {
            // no prediction
            return;
        }
        final int bitsPerPixel = colors * bitsPerComponent;
        final int bytesPerPixel = (bitsPerPixel + 7) / 8;
        final int rowlength = actline.length;
        switch (predictor)
        {
            case 2:
                // PRED TIFF SUB (horizontal differencing)
                if (bitsPerComponent == 8)
                {
                    // for 8 bits per component it is the same algorithm as PRED SUB of PNG format
                    for (int p = bytesPerPixel; p < rowlength; p++)
                    {
                        int sub = actline[p] & 0xff;
                        int left = actline[p - bytesPerPixel] & 0xff;
                        actline[p] = (byte) (sub + left);
                    }
                    break;
                }
                if (bitsPerComponent == 16)
                {
                    // 16-bit components: add big-endian 16-bit values pairwise
                    for (int p = bytesPerPixel; p < rowlength; p += 2)
                    {
                        int sub = ((actline[p] & 0xff) << 8) + (actline[p + 1] & 0xff);
                        int left = (((actline[p - bytesPerPixel] & 0xff) << 8)
                                + (actline[p - bytesPerPixel + 1] & 0xff));
                        actline[p] = (byte) (((sub + left) >> 8) & 0xff);
                        actline[p + 1] = (byte) ((sub + left) & 0xff);
                    }
                    break;
                }
                if (bitsPerComponent == 1 && colors == 1)
                {
                    // bytesPerPixel cannot be used:
                    // "A row shall occupy a whole number of bytes, rounded up if necessary.
                    // Samples and their components shall be packed into bytes
                    // from high-order to low-order bits."
                    for (int p = 0; p < rowlength; p++)
                    {
                        for (int bit = 7; bit >= 0; --bit)
                        {
                            int sub = (actline[p] >> bit) & 1;
                            if (p == 0 && bit == 7)
                            {
                                // very first bit of the row has no left neighbor
                                continue;
                            }
                            int left;
                            if (bit == 7)
                            {
                                // use bit #0 from previous byte
                                left = actline[p - 1] & 1;
                            }
                            else
                            {
                                // use "previous" bit
                                left = (actline[p] >> (bit + 1)) & 1;
                            }
                            if (((sub + left) & 1) == 0)
                            {
                                // reset bit
                                actline[p] = (byte) (actline[p] & ~(1 << bit));
                            }
                            else
                            {
                                // set bit
                                actline[p] = (byte) (actline[p] | (1 << bit));
                            }
                        }
                    }
                    break;
                }
                // everything else, i.e. bpc 2 and 4, but has been tested for bpc 1 and 8 too
                int elements = columns * colors;
                for (int p = colors; p < elements; ++p)
                {
                    int bytePosSub = p * bitsPerComponent / 8;
                    int bitPosSub = 8 - p * bitsPerComponent % 8 - bitsPerComponent;
                    int bytePosLeft = (p - colors) * bitsPerComponent / 8;
                    int bitPosLeft = 8 - (p - colors) * bitsPerComponent % 8 - bitsPerComponent;
                    int sub = getBitSeq(actline[bytePosSub], bitPosSub, bitsPerComponent);
                    int left = getBitSeq(actline[bytePosLeft], bitPosLeft, bitsPerComponent);
                    actline[bytePosSub] = (byte) calcSetBitSeq(actline[bytePosSub], bitPosSub, bitsPerComponent, sub + left);
                }
                break;
            case 10:
                // PRED NONE
                // do nothing
                break;
            case 11:
                // PRED SUB
                for (int p = bytesPerPixel; p < rowlength; p++)
                {
                    int sub = actline[p];
                    int left = actline[p - bytesPerPixel];
                    actline[p] = (byte) (sub + left);
                }
                break;
            case 12:
                // PRED UP
                for (int p = 0; p < rowlength; p++)
                {
                    int up = actline[p] & 0xff;
                    int prior = lastline[p] & 0xff;
                    actline[p] = (byte) ((up + prior) & 0xff);
                }
                break;
            case 13:
                // PRED AVG
                for (int p = 0; p < rowlength; p++)
                {
                    int avg = actline[p] & 0xff;
                    int left = p - bytesPerPixel >= 0 ? actline[p - bytesPerPixel] & 0xff : 0;
                    int up = lastline[p] & 0xff;
                    actline[p] = (byte) ((avg + (left + up) / 2) & 0xff);
                }
                break;
            case 14:
                // PRED PAETH
                for (int p = 0; p < rowlength; p++)
                {
                    int paeth = actline[p] & 0xff;
                    int a = p - bytesPerPixel >= 0 ? actline[p - bytesPerPixel] & 0xff : 0;// left
                    int b = lastline[p] & 0xff;// upper
                    int c = p - bytesPerPixel >= 0 ? lastline[p - bytesPerPixel] & 0xff : 0;// upperleft
                    int value = a + b - c;
                    int absa = Math.abs(value - a);
                    int absb = Math.abs(value - b);
                    int absc = Math.abs(value - c);
                    if (absa <= absb && absa <= absc)
                    {
                        actline[p] = (byte) ((paeth + a) & 0xff);
                    }
                    else if (absb <= absc)
                    {
                        actline[p] = (byte) ((paeth + b) & 0xff);
                    }
                    else
                    {
                        actline[p] = (byte) ((paeth + c) & 0xff);
                    }
                }
                break;
            default:
                // unknown predictor values are silently left undecoded
                break;
        }
    }

    /**
     * Decodes a complete predictor-encoded stream into {@code out}. For PNG
     * predictors (values >= 10) each row is prefixed with its own filter-type
    * byte. NOTE(review): loop relies on {@code in.available() > 0}, which is
     * only a hint for general streams — confirm callers pass fully buffered
     * input.
     *
     * @throws IOException if reading from {@code in} or writing to {@code out} fails
     */
    static void decodePredictor(int predictor, int colors, int bitsPerComponent, int columns, InputStream in, OutputStream out)
            throws IOException
    {
        if (predictor == 1)
        {
            // no prediction
            IOUtils.copy(in, out);
        }
        else
        {
            // calculate sizes
            final int rowlength = calculateRowLength(colors, bitsPerComponent, columns);
            byte[] actline = new byte[rowlength];
            byte[] lastline = new byte[rowlength];
            int linepredictor = predictor;
            while (in.available() > 0)
            {
                // test for PNG predictor; each value >= 10 (not only 15) indicates usage of PNG predictor
                if (predictor >= 10)
                {
                    // PNG predictor; each row starts with predictor type (0, 1, 2, 3, 4)
                    // read per line predictor
                    linepredictor = in.read();
                    if (linepredictor == -1)
                    {
                        return;
                    }
                    // add 10 to treat value 0 as 10, 1 as 11, ...
                    linepredictor += 10;
                }
                // read line
                int i, offset = 0;
                while (offset < rowlength && ((i = in.read(actline, offset, rowlength - offset)) != -1))
                {
                    offset += i;
                }
                decodePredictorRow(linepredictor, colors, bitsPerComponent, columns, actline, lastline);
                System.arraycopy(actline, 0, lastline, 0, rowlength);
                out.write(actline);
            }
        }
    }

    /** Returns the number of bytes per row, rounded up to whole bytes. */
    static int calculateRowLength(int colors, int bitsPerComponent, int columns)
    {
        final int bitsPerPixel = colors * bitsPerComponent;
        return (columns * bitsPerPixel + 7) / 8;
    }

    // get value from bit interval from a byte
    static int getBitSeq(int by, int startBit, int bitSize)
    {
        int mask = ((1 << bitSize) - 1);
        return (by >>> startBit) & mask;
    }

    // set value in a bit interval and return that value
    static int calcSetBitSeq(int by, int startBit, int bitSize, int val)
    {
        int mask = ((1 << bitSize) - 1);
        int truncatedVal = val & mask;
        mask = ~(mask << startBit);
        return (by & mask) | (truncatedVal << startBit);
    }

    /**
     * Wraps and <code>OutputStream</code> in a predictor decoding stream as necessary.
     * If no predictor is specified by the parameters, the original stream is returned as is.
     *
     * @param out The stream to which decoded data should be written
     * @param decodeParams Decode parameters for the stream
     * @return An <code>OutputStream</code> is returned, which will write decoded data
     * into the given stream. If no predictor is specified, the original stream is returned.
     */
    static OutputStream wrapPredictor(OutputStream out, COSDictionary decodeParams)
    {
        int predictor = decodeParams.getInt(COSName.PREDICTOR);
        if (predictor > 1)
        {
            // colors is clamped to 32, presumably to guard against corrupt files
            int colors = Math.min(decodeParams.getInt(COSName.COLORS, 1), 32);
            int bitsPerPixel = decodeParams.getInt(COSName.BITS_PER_COMPONENT, 8);
            int columns = decodeParams.getInt(COSName.COLUMNS, 1);
            return new PredictorOutputStream(out, predictor, colors, bitsPerPixel, columns);
        }
        else
        {
            return out;
        }
    }

    /**
     * Output stream that implements predictor decoding. Data is buffered until a complete
     * row is available, which is then decoded and written to the underlying stream.
     * The previous row is retained for decoding the next row.
     */
    private static final class PredictorOutputStream extends FilterOutputStream
    {
        // current predictor type
        private int predictor;
        // image decode parameters
        private final int colors, bitsPerComponent, columns, rowLength;
        // PNG predictor (predictor>=10) means every row has a (potentially different)
        // predictor value
        private final boolean predictorPerRow;
        // data buffers
        private byte[] currentRow, lastRow;
        // amount of data in the current row
        private int currentRowData = 0;
        // was the per-row predictor value read for the current row being processed
        private boolean predictorRead = false;

        PredictorOutputStream(OutputStream out, int predictor, int colors, int bitsPerComponent, int columns)
        {
            super(out);
            this.predictor = predictor;
            this.colors = colors;
            this.bitsPerComponent = bitsPerComponent;
            this.columns = columns;
            this.rowLength = calculateRowLength(colors, bitsPerComponent, columns);
            this.predictorPerRow = (predictor >= 10);
            currentRow = new byte[rowLength];
            lastRow = new byte[rowLength];
        }

        @Override
        public void write(byte[] bytes) throws IOException
        {
            write(bytes, 0, bytes.length);
        }

        /**
         * Buffers incoming bytes row by row; each completed row is decoded
         * and flushed to the underlying stream.
         */
        @Override
        public void write(byte[] bytes, int off, int len) throws IOException
        {
            int currentOffset = off;
            int maxOffset = currentOffset + len;
            while (currentOffset < maxOffset)
            {
                if (predictorPerRow && currentRowData == 0 && !predictorRead)
                {
                    // PNG predictor; each row starts with predictor type (0, 1, 2, 3, 4)
                    // read per line predictor, add 10 to treat value 0 as 10, 1 as 11, ...
                    predictor = bytes[currentOffset] + 10;
                    currentOffset++;
                    predictorRead = true;
                }
                else
                {
                    int toRead = Math.min(rowLength - currentRowData, maxOffset - currentOffset);
                    System.arraycopy(bytes, currentOffset, currentRow, currentRowData, toRead);
                    currentRowData += toRead;
                    currentOffset += toRead;
                    // current row is filled, decode it, write it to underlying stream,
                    // and reset the state.
                    if (currentRowData == currentRow.length)
                    {
                        decodeAndWriteRow();
                    }
                }
            }
        }

        // decode the buffered row in-place, emit it, and swap row buffers
        private void decodeAndWriteRow() throws IOException
        {
            decodePredictorRow(predictor, colors, bitsPerComponent, columns, currentRow, lastRow);
            out.write(currentRow);
            flipRows();
        }

        /**
         * Flips the row buffers (to avoid copying), and resets the current-row index
         * and predictorRead flag
         */
        private void flipRows()
        {
            byte[] temp = lastRow;
            lastRow = currentRow;
            currentRow = temp;
            currentRowData = 0;
            predictorRead = false;
        }

        @Override
        public void flush() throws IOException
        {
            // The last row is allowed to be incomplete, and should be completed with zeros.
            if (currentRowData > 0)
            {
                Arrays.fill(currentRow, currentRowData, rowLength, (byte)0);
                decodeAndWriteRow();
            }
            super.flush();
        }

        // NOTE(review): this override only delegates; FilterOutputStream.close()
        // already calls flush() followed by out.close()
        @Override
        public void close() throws IOException
        {
            super.close();
        }

        @Override
        public void write(int i) throws IOException
        {
            // single-byte writes are not supported; callers must use the array variants
            throw new UnsupportedOperationException("Not supported");
        }
    }
}
| pdfbox/src/main/java/org/apache/pdfbox/filter/Predictor.java | /*
* Copyright 2014 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.filter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.io.IOUtils;
/**
 * Helper class to contain predictor decoding used by Flate and LZW filter.
 * To see the history, look at the FlateFilter class.
 *
 * Supports the TIFF predictor (value 2) and the PNG row filters
 * None/Sub/Up/Average/Paeth, represented internally as values 10..14.
 */
public final class Predictor
{
    // utility class; not to be instantiated
    private Predictor()
    {
    }

    /**
     * Decodes a single line of data in-place.
     * @param predictor Predictor value for the current line
     * @param colors Number of color components, from decode parameters.
     * @param bitsPerComponent Number of bits per components, from decode parameters.
     * @param columns Number samples in a row, from decode parameters.
     * @param actline Current (active) line to decode. Data will be decoded in-place,
     *                i.e. - the contents of this buffer will be modified.
     * @param lastline The previous decoded line. When decoding the first line, this
     *                 parameter should be an empty byte array of the same length as
     *                 <code>actline</code>.
     */
    static void decodePredictorRow(int predictor, int colors, int bitsPerComponent, int columns, byte[] actline, byte[] lastline)
    {
        if (predictor == 1)
        {
            // no prediction
            return;
        }
        final int bitsPerPixel = colors * bitsPerComponent;
        final int bytesPerPixel = (bitsPerPixel + 7) / 8;
        final int rowlength = actline.length;
        switch (predictor)
        {
            case 2:
                // PRED TIFF SUB (horizontal differencing)
                if (bitsPerComponent == 8)
                {
                    // for 8 bits per component it is the same algorithm as PRED SUB of PNG format
                    for (int p = bytesPerPixel; p < rowlength; p++)
                    {
                        int sub = actline[p] & 0xff;
                        int left = actline[p - bytesPerPixel] & 0xff;
                        actline[p] = (byte) (sub + left);
                    }
                    break;
                }
                if (bitsPerComponent == 16)
                {
                    // 16-bit components: add big-endian 16-bit values pairwise
                    for (int p = bytesPerPixel; p < rowlength; p += 2)
                    {
                        int sub = ((actline[p] & 0xff) << 8) + (actline[p + 1] & 0xff);
                        int left = (((actline[p - bytesPerPixel] & 0xff) << 8)
                                + (actline[p - bytesPerPixel + 1] & 0xff));
                        actline[p] = (byte) (((sub + left) >> 8) & 0xff);
                        actline[p + 1] = (byte) ((sub + left) & 0xff);
                    }
                    break;
                }
                if (bitsPerComponent == 1 && colors == 1)
                {
                    // bytesPerPixel cannot be used:
                    // "A row shall occupy a whole number of bytes, rounded up if necessary.
                    // Samples and their components shall be packed into bytes
                    // from high-order to low-order bits."
                    for (int p = 0; p < rowlength; p++)
                    {
                        for (int bit = 7; bit >= 0; --bit)
                        {
                            int sub = (actline[p] >> bit) & 1;
                            if (p == 0 && bit == 7)
                            {
                                // very first bit of the row has no left neighbor
                                continue;
                            }
                            int left;
                            if (bit == 7)
                            {
                                // use bit #0 from previous byte
                                left = actline[p - 1] & 1;
                            }
                            else
                            {
                                // use "previous" bit
                                left = (actline[p] >> (bit + 1)) & 1;
                            }
                            if (((sub + left) & 1) == 0)
                            {
                                // reset bit
                                actline[p] = (byte) (actline[p] & ~(1 << bit));
                            }
                            else
                            {
                                // set bit
                                actline[p] = (byte) (actline[p] | (1 << bit));
                            }
                        }
                    }
                    break;
                }
                // everything else, i.e. bpc 2 and 4, but has been tested for bpc 1 and 8 too
                int elements = columns * colors;
                for (int p = colors; p < elements; ++p)
                {
                    int bytePosSub = p * bitsPerComponent / 8;
                    int bitPosSub = 8 - p * bitsPerComponent % 8 - bitsPerComponent;
                    int bytePosLeft = (p - colors) * bitsPerComponent / 8;
                    int bitPosLeft = 8 - (p - colors) * bitsPerComponent % 8 - bitsPerComponent;
                    int sub = getBitSeq(actline[bytePosSub], bitPosSub, bitsPerComponent);
                    int left = getBitSeq(actline[bytePosLeft], bitPosLeft, bitsPerComponent);
                    actline[bytePosSub] = (byte) calcSetBitSeq(actline[bytePosSub], bitPosSub, bitsPerComponent, sub + left);
                }
                break;
            case 10:
                // PRED NONE
                // do nothing
                break;
            case 11:
                // PRED SUB
                for (int p = bytesPerPixel; p < rowlength; p++)
                {
                    int sub = actline[p];
                    int left = actline[p - bytesPerPixel];
                    actline[p] = (byte) (sub + left);
                }
                break;
            case 12:
                // PRED UP
                for (int p = 0; p < rowlength; p++)
                {
                    int up = actline[p] & 0xff;
                    int prior = lastline[p] & 0xff;
                    actline[p] = (byte) ((up + prior) & 0xff);
                }
                break;
            case 13:
                // PRED AVG
                for (int p = 0; p < rowlength; p++)
                {
                    int avg = actline[p] & 0xff;
                    int left = p - bytesPerPixel >= 0 ? actline[p - bytesPerPixel] & 0xff : 0;
                    int up = lastline[p] & 0xff;
                    actline[p] = (byte) ((avg + (left + up) / 2) & 0xff);
                }
                break;
            case 14:
                // PRED PAETH
                for (int p = 0; p < rowlength; p++)
                {
                    int paeth = actline[p] & 0xff;
                    int a = p - bytesPerPixel >= 0 ? actline[p - bytesPerPixel] & 0xff : 0;// left
                    int b = lastline[p] & 0xff;// upper
                    int c = p - bytesPerPixel >= 0 ? lastline[p - bytesPerPixel] & 0xff : 0;// upperleft
                    int value = a + b - c;
                    int absa = Math.abs(value - a);
                    int absb = Math.abs(value - b);
                    int absc = Math.abs(value - c);
                    if (absa <= absb && absa <= absc)
                    {
                        actline[p] = (byte) ((paeth + a) & 0xff);
                    }
                    else if (absb <= absc)
                    {
                        actline[p] = (byte) ((paeth + b) & 0xff);
                    }
                    else
                    {
                        actline[p] = (byte) ((paeth + c) & 0xff);
                    }
                }
                break;
            default:
                // unknown predictor values are silently left undecoded
                break;
        }
    }

    /**
     * Decodes a complete predictor-encoded stream into {@code out}. For PNG
     * predictors (values >= 10) each row is prefixed with its own filter-type
     * byte.
     *
     * @throws IOException if reading from {@code in} or writing to {@code out} fails
     */
    static void decodePredictor(int predictor, int colors, int bitsPerComponent, int columns, InputStream in, OutputStream out)
            throws IOException
    {
        if (predictor == 1)
        {
            // no prediction
            IOUtils.copy(in, out);
        }
        else
        {
            // calculate sizes
            final int rowlength = calculateRowLength(colors, bitsPerComponent, columns);
            byte[] actline = new byte[rowlength];
            byte[] lastline = new byte[rowlength];
            int linepredictor = predictor;
            while (in.available() > 0)
            {
                // test for PNG predictor; each value >= 10 (not only 15) indicates usage of PNG predictor
                if (predictor >= 10)
                {
                    // PNG predictor; each row starts with predictor type (0, 1, 2, 3, 4)
                    // read per line predictor
                    linepredictor = in.read();
                    if (linepredictor == -1)
                    {
                        return;
                    }
                    // add 10 to treat value 0 as 10, 1 as 11, ...
                    linepredictor += 10;
                }
                // read line
                int i, offset = 0;
                while (offset < rowlength && ((i = in.read(actline, offset, rowlength - offset)) != -1))
                {
                    offset += i;
                }
                decodePredictorRow(linepredictor, colors, bitsPerComponent, columns, actline, lastline);
                System.arraycopy(actline, 0, lastline, 0, rowlength);
                out.write(actline);
            }
        }
    }

    /** Returns the number of bytes per row, rounded up to whole bytes. */
    static int calculateRowLength(int colors, int bitsPerComponent, int columns)
    {
        final int bitsPerPixel = colors * bitsPerComponent;
        return (columns * bitsPerPixel + 7) / 8;
    }

    // get value from bit interval from a byte
    static int getBitSeq(int by, int startBit, int bitSize)
    {
        int mask = ((1 << bitSize) - 1);
        return (by >>> startBit) & mask;
    }

    // set value in a bit interval and return that value
    static int calcSetBitSeq(int by, int startBit, int bitSize, int val)
    {
        int mask = ((1 << bitSize) - 1);
        int truncatedVal = val & mask;
        mask = ~(mask << startBit);
        return (by & mask) | (truncatedVal << startBit);
    }

    /**
     * Wraps and <code>OutputStream</code> in a predictor decoding stream as necessary.
     * If no predictor is specified by the parameters, the original stream is returned as is.
     *
     * @param out The stream to which decoded data should be written
     * @param decodeParams Decode parameters for the stream
     * @return An <code>OutputStream</code> is returned, which will write decoded data
     * into the given stream. If no predictor is specified, the original stream is returned.
     */
    static OutputStream wrapPredictor(OutputStream out, COSDictionary decodeParams)
    {
        int predictor = decodeParams.getInt(COSName.PREDICTOR);
        if (predictor > 1)
        {
            // colors is clamped to 32, presumably to guard against corrupt files
            int colors = Math.min(decodeParams.getInt(COSName.COLORS, 1), 32);
            int bitsPerPixel = decodeParams.getInt(COSName.BITS_PER_COMPONENT, 8);
            int columns = decodeParams.getInt(COSName.COLUMNS, 1);
            return new PredictorOutputStream(out, predictor, colors, bitsPerPixel, columns);
        }
        else
        {
            return out;
        }
    }

    /**
     * Output stream that implements predictor decoding. Data is buffered until a complete
     * row is available, which is then decoded and written to the underlying stream.
     * The previous row is retained for decoding the next row.
     */
    private static final class PredictorOutputStream extends FilterOutputStream
    {
        // current predictor type
        private int predictor;
        // image decode parameters
        private final int colors, bitsPerComponent, columns, rowLength;
        // PNG predictor (predictor>=10) means every row has a (potentially different)
        // predictor value
        private final boolean predictorPerRow;
        // data buffers
        private byte[] currentRow, lastRow;
        // amount of data in the current row
        private int currentRowData = 0;
        // was the per-row predictor value read for the current row being processed
        private boolean predictorRead = false;

        PredictorOutputStream(OutputStream out, int predictor, int colors, int bitsPerComponent, int columns)
        {
            super(out);
            this.predictor = predictor;
            this.colors = colors;
            this.bitsPerComponent = bitsPerComponent;
            this.columns = columns;
            this.rowLength = calculateRowLength(colors, bitsPerComponent, columns);
            this.predictorPerRow = (predictor >= 10);
            currentRow = new byte[rowLength];
            lastRow = new byte[rowLength];
        }

        @Override
        public void write(byte[] bytes) throws IOException
        {
            write(bytes, 0, bytes.length);
        }

        /**
         * Buffers incoming bytes row by row; each completed row is decoded
         * and flushed to the underlying stream.
         *
         * Fix: track position in a local variable instead of mutating the
         * {@code off} parameter — reusing a method parameter as a cursor
         * obscures the caller-supplied value and is error-prone.
         */
        @Override
        public void write(byte[] bytes, int off, int len) throws IOException
        {
            int currentOffset = off;
            int maxOffset = currentOffset + len;
            while (currentOffset < maxOffset)
            {
                if (predictorPerRow && currentRowData == 0 && !predictorRead)
                {
                    // PNG predictor; each row starts with predictor type (0, 1, 2, 3, 4)
                    // read per line predictor, add 10 to treat value 0 as 10, 1 as 11, ...
                    predictor = bytes[currentOffset] + 10;
                    currentOffset++;
                    predictorRead = true;
                }
                else
                {
                    int toRead = Math.min(rowLength - currentRowData, maxOffset - currentOffset);
                    System.arraycopy(bytes, currentOffset, currentRow, currentRowData, toRead);
                    currentRowData += toRead;
                    currentOffset += toRead;
                    // current row is filled, decode it, write it to underlying stream,
                    // and reset the state.
                    if (currentRowData == currentRow.length)
                    {
                        decodeAndWriteRow();
                    }
                }
            }
        }

        // decode the buffered row in-place, emit it, and swap row buffers
        private void decodeAndWriteRow() throws IOException
        {
            decodePredictorRow(predictor, colors, bitsPerComponent, columns, currentRow, lastRow);
            out.write(currentRow);
            flipRows();
        }

        /**
         * Flips the row buffers (to avoid copying), and resets the current-row index
         * and predictorRead flag
         */
        private void flipRows()
        {
            byte[] temp = lastRow;
            lastRow = currentRow;
            currentRow = temp;
            currentRowData = 0;
            predictorRead = false;
        }

        @Override
        public void flush() throws IOException
        {
            // The last row is allowed to be incomplete, and should be completed with zeros.
            if (currentRowData > 0)
            {
                Arrays.fill(currentRow, currentRowData, rowLength, (byte)0);
                decodeAndWriteRow();
            }
            super.flush();
        }

        @Override
        public void close() throws IOException
        {
            super.close();
        }

        @Override
        public void write(int i) throws IOException
        {
            // single-byte writes are not supported; callers must use the array variants
            throw new UnsupportedOperationException("Not supported");
        }
    }
}
| PDFBOX-4071: don't reuse method parameter
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1827693 13f79535-47bb-0310-9956-ffa450edef68
| pdfbox/src/main/java/org/apache/pdfbox/filter/Predictor.java | PDFBOX-4071: don't reuse method parameter |
|
Java | apache-2.0 | 7223fa5c9f715fcb07f3f363e8bc07c717a44e4f | 0 | semonte/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,xfournet/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,fitermay/intellij-community,clumsy/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,da1z/intellij-community,xfournet/intellij-community,signed/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,allotria/intellij-community,FHannes/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,hurricup/intellij-community,xfournet/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,fitermay/intellij-community,signed/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,asedunov/intellij-community,apixandru/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,asedunov/intellij-community,signed/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,suncy
cheng/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,hurricup/intellij-community,allotria/intellij-community,signed/intellij-community,ibinti/intellij-community,kdwink/intellij-community,retomerz/intellij-community,signed/intellij-community,da1z/intellij-community,clumsy/intellij-community,allotria/intellij-community,signed/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,kdwink/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,asedunov/intellij-community,signed/intellij-community,da1z/intellij-community,allotria/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,allotria/intellij-community,hurricup/intellij-community,da1z/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,da1z/intellij-community,retomerz/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,allotria/intellij-community,kdwink/intellij-community,kdwink/intellij-community,MER-GROUP
/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,allotria/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,apixandru/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,ibinti/intellij-community,apixandru/intellij-community,signed/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,apixandru/intellij-community,apixandru/intellij-community,allotria/intellij-community,asedunov/intellij-community,signed/intellij-community,da1z/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,clumsy/intellij-community,xfournet/intellij-community,xfournet/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,fitermay/intellij-community
,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,ibinti/intellij-community,fitermay/intellij-community,clumsy/intellij-community,retomerz/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,xfournet/intellij-community,semonte/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,signed/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,hurricup/intellij-community,fitermay/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,fitermay/intellij-community,clumsy/intellij-community,ibinti/intellij-community,kdwink/intellij-community,semonte/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,signed/intellij-community,FHannes/intell
ij-community,kdwink/intellij-community,signed/intellij-community,clumsy/intellij-community,kdwink/intellij-community,hurricup/intellij-community,semonte/intellij-community,asedunov/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,semonte/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,ibinti/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.editorActions.enter;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.highlighting.BraceMatcher;
import com.intellij.codeInsight.highlighting.BraceMatchingUtil;
import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.TokenType;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
/**
 * Enter handler that inserts the missing closing curly brace(s) when Enter is pressed right
 * after an unmatched '{', and re-formats the affected code fragment afterwards.
 * <p/>
 * Language plugins may subclass this handler and override the protected hooks
 * ({@link #isApplicable(PsiFile, int)}, {@link #getMaxRBraceCount(PsiFile, Editor, int)},
 * {@link #generateStringToInsert(Editor, int, int)}, etc.) to customize the behavior.
 */
public class EnterAfterUnmatchedBraceHandler extends EnterHandlerDelegateAdapter {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.editorActions.enter.EnterAfterUnmatchedBraceHandler");

  /**
   * Inserts the missing '}' characters (if any are needed at the caret position) and asks the
   * platform to finish the Enter processing with forced indent; otherwise falls through to the
   * default Enter handling.
   */
  @Override
  public Result preprocessEnter(@NotNull final PsiFile file,
                                @NotNull final Editor editor,
                                @NotNull final Ref<Integer> caretOffsetRef,
                                @NotNull final Ref<Integer> caretAdvance,
                                @NotNull final DataContext dataContext,
                                final EditorActionHandler originalHandler) {
    int caretOffset = caretOffsetRef.get();
    if (!isApplicable(file, caretOffset)) {
      return Result.Continue;
    }
    int maxRBraceCount = getMaxRBraceCount(file, editor, caretOffset);
    if (maxRBraceCount > 0) {
      insertRBraces(file, editor,
                    caretOffset,
                    getRBraceOffset(file, editor, caretOffset),
                    generateStringToInsert(editor, caretOffset, maxRBraceCount));
      return Result.DefaultForceIndent;
    }
    return Result.Continue;
  }

  /**
   * Checks that the text context is in responsibility of the handler. The default implementation
   * accepts every context; language-specific subclasses are expected to narrow it.
   *
   * @param file        target PSI file
   * @param caretOffset target caret offset
   * @return true, if handler is in charge
   */
  public boolean isApplicable(@NotNull PsiFile file, int caretOffset) {
    return true;
  }

  /**
   * Calculates the maximum number of '}' that can be inserted by handler.
   * Can return <code>0</code> or less in custom implementation to skip '}' insertion in the <code>preprocessEnter</code> call
   * and switch to default implementation.
   *
   * @param file        target PSI file
   * @param editor      target editor
   * @param caretOffset target caret offset
   * @return maximum number of '}' that can be inserted by handler, <code>0</code> or less to switch to default implementation
   */
  protected int getMaxRBraceCount(@NotNull final PsiFile file, @NotNull final Editor editor, int caretOffset) {
    // the user can disable automatic '}' insertion via the code-insight settings
    if (!CodeInsightSettings.getInstance().INSERT_BRACE_ON_ENTER) {
      return 0;
    }
    return Math.max(0, getUnmatchedLBracesNumberBefore(editor, caretOffset, file.getFileType()));
  }

  /**
   * Calculates the string of '}' that have to be inserted by handler.
   * Some languages can expand the string by additional characters (i.e. '\', ';')
   *
   * @param editor         target editor
   * @param caretOffset    target caret offset
   * @param maxRBraceCount the maximum number of '}' for insert at position, it always positive
   * @return the string of '}' that has to be inserted by handler, it must have at least one '}'
   */
  @NotNull
  protected String generateStringToInsert(@NotNull final Editor editor, int caretOffset, int maxRBraceCount) {
    assert maxRBraceCount > 0;
    CharSequence text = editor.getDocument().getCharsSequence();
    int bracesToInsert = 0;
    // walk left from the caret counting consecutive '{' (whitespace is skipped, see isStopChar)
    for (int i = caretOffset - 1; i >= 0 && bracesToInsert < maxRBraceCount; --i) {
      final char c = text.charAt(i);
      if (c == '{') {
        ++bracesToInsert;
      }
      else if (isStopChar(c)) {
        break;
      }
    }
    // always insert at least one closing brace
    return StringUtil.repeatSymbol('}', Math.max(bracesToInsert, 1));
  }

  /**
   * Checks the character before the inserted '}' to reduce the count of inserted '}'.
   * The number of inserted '}' will increase for each found '{'.
   *
   * @param c character to check
   * @return true, to stop back iteration
   */
  protected boolean isStopChar(char c) {
    return " \n\t".indexOf(c) < 0;
  }

  /**
   * Calculates the position for insertion of one or more '}'.
   *
   * @param file        target PSI file
   * @param editor      target editor
   * @param caretOffset target caret offset
   * @return the position between <code>caretOffset</code> and the end of file
   */
  protected int getRBraceOffset(@NotNull final PsiFile file, @NotNull final Editor editor, int caretOffset) {
    CharSequence text = editor.getDocument().getCharsSequence();
    int offset = CharArrayUtil.shiftForward(text, caretOffset, " \t");
    final int fileLength = text.length();
    if (offset < fileLength && ")];,%<?".indexOf(text.charAt(offset)) < 0) {
      // extend the insertion point to the end of the PSI element starting at the caret
      // (it may span several lines, see calculateOffsetToInsertClosingBrace())
      offset = calculateOffsetToInsertClosingBrace(file, text, offset).second;
    }
    return Math.min(offset, fileLength);
  }

  /**
   * Inserts the <code>generatedRBraces</code> at the <code>rBracesInsertOffset</code> position and formats the code block.
   * @param file                target PSI file
   * @param editor              target editor
   * @param caretOffset         target caret offset
   * @param rBracesInsertOffset target position to insert
   * @param generatedRBraces    string of '}' to insert
   */
  protected void insertRBraces(@NotNull PsiFile file,
                               @NotNull Editor editor,
                               int caretOffset,
                               int rBracesInsertOffset,
                               String generatedRBraces) {
    final Document document = editor.getDocument();
    insertRBracesAtPosition(document, caretOffset, rBracesInsertOffset, generatedRBraces);
    formatCodeFragmentBetweenBraces(file, document, caretOffset, rBracesInsertOffset, generatedRBraces);
  }

  /**
   * Inserts the <code>rBracesCount</code> of '}' at the <code>rBracesInsertOffset</code> position.
   * Also inserts a temporary line feed at the caret (see the comment below); it is removed again
   * in {@link #formatCodeFragmentBetweenBraces} after indents have been adjusted.
   *
   * @param document            target document
   * @param caretOffset         target caret offset
   * @param rBracesInsertOffset target position to insert
   * @param generatedRBraces    string of '}' to insert
   */
  protected void insertRBracesAtPosition(Document document, int caretOffset, int rBracesInsertOffset, String generatedRBraces) {
    document.insertString(rBracesInsertOffset, "\n" + generatedRBraces);
    // We need to adjust indents of the text that will be moved, hence, need to insert preliminary line feed.
    // Example:
    //     if (test1()) {
    //     } else {<caret> if (test2()) {
    //     foo();
    //     }
    // We insert here '\n}' after 'foo();' and have the following:
    //     if (test1()) {
    //     } else { if (test2()) {
    //         foo();
    //     }
    // }
    // That is formatted incorrectly because line feed between 'else' and 'if' is not inserted yet (whole 'if' block is indent anchor
    // to 'if' code block('{}')). So, we insert temporary line feed between 'if' and 'else', correct indent and remove that temporary
    // line feed.
    document.insertString(caretOffset, "\n");
  }

  /**
   * Formats the code block between caret and inserted braces.
   *
   * @param file                target PSI file
   * @param document            target document
   * @param caretOffset         target caret offset
   * @param rBracesInsertOffset target position to insert
   * @param generatedRBraces    string of '}' to insert
   */
  protected void formatCodeFragmentBetweenBraces(@NotNull PsiFile file,
                                                 @NotNull Document document,
                                                 int caretOffset,
                                                 int rBracesInsertOffset,
                                                 String generatedRBraces) {
    Project project = file.getProject();
    long stamp = document.getModificationStamp();
    boolean closingBraceIndentAdjusted;
    try {
      PsiDocumentManager.getInstance(project).commitDocument(document);
      CodeStyleManager.getInstance(project).adjustLineIndent(file, new TextRange(caretOffset, rBracesInsertOffset + 2));
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
    finally {
      // an unchanged modification stamp means the formatter did not touch the document
      closingBraceIndentAdjusted = stamp != document.getModificationStamp();
      // do you remember that we insert the '\n'? here we take it back!
      document.deleteString(caretOffset, caretOffset + 1);
    }

    // There is a possible case that formatter was unable to adjust line indent for the closing brace (that is the case for plain text
    // document for example). Hence, we're trying to do that manually by copying the indent of the line that holds the brace.
    if (!closingBraceIndentAdjusted) {
      int line = document.getLineNumber(rBracesInsertOffset);
      StringBuilder buffer = new StringBuilder();
      int start = document.getLineStartOffset(line);
      int end = document.getLineEndOffset(line);
      final CharSequence text = document.getCharsSequence();
      for (int i = start; i < end; i++) {
        char c = text.charAt(i);
        if (c != ' ' && c != '\t') {
          break;
        }
        else {
          buffer.append(c);
        }
      }
      if (buffer.length() > 0) {
        document.insertString(rBracesInsertOffset + 1, buffer);
      }
    }
  }

  /**
   * Current handler inserts closing curly brace (right brace) if necessary. There is a possible case that it should be located
   * more than one line forward.
   * <p/>
   * <b>Example</b>
   * <pre>
   *     if (test1()) {
   *     } else {<caret> if (test2()) {
   *         foo();
   *     }
   * </pre>
   * <p/>
   * We want to get this after the processing:
   * <pre>
   *     if (test1()) {
   *     } else {
   *         if (test2()) {
   *             foo();
   *         }
   *     }
   * </pre>
   * I.e. closing brace should be inserted two lines below current caret line. Hence, we need to calculate correct offset
   * to use for brace inserting. This method is responsible for that.
   * <p/>
   * In essence it inspects PSI structure and finds PSI elements with the max length that starts at caret offset. End offset
   * of that element is used as an insertion point.
   *
   * @param file   target PSI file
   * @param text   text from the given file
   * @param offset target offset where line feed will be inserted
   * @return pair of (element, offset). The element is the '}' owner, if applicable; the offset is the position for inserting closing brace
   */
  protected Pair<PsiElement, Integer> calculateOffsetToInsertClosingBrace(@NotNull PsiFile file, @NotNull CharSequence text, final int offset) {
    PsiElement element = PsiUtilCore.getElementAtOffset(file, offset);
    ASTNode node = element.getNode();
    if (node != null && node.getElementType() == TokenType.WHITE_SPACE) {
      // caret is on whitespace: fall back to the end of the current line
      return Pair.create(null, CharArrayUtil.shiftForwardUntil(text, offset, "\n"));
    }
    // climb up the PSI tree as long as the parent still starts exactly at the caret offset
    for (PsiElement parent = element.getParent(); parent != null; parent = parent.getParent()) {
      ASTNode parentNode = parent.getNode();
      if (parentNode == null || parentNode.getStartOffset() != offset) {
        break;
      }
      element = parent;
    }
    if (element.getTextOffset() != offset) {
      return Pair.create(null, CharArrayUtil.shiftForwardUntil(text, offset, "\n"));
    }
    return Pair.create(element, element.getTextRange().getEndOffset());
  }

  /**
   * Returns true if the given offset is located right after a '{' that has no matching '}'.
   */
  public static boolean isAfterUnmatchedLBrace(Editor editor, int offset, FileType fileType) {
    return getUnmatchedLBracesNumberBefore(editor, offset, fileType) > 0;
  }

  /**
   * Calculates number of unmatched left braces before the given offset.
   *
   * @param editor   target editor
   * @param offset   target offset
   * @param fileType target file type
   * @return number of unmatched braces before the given offset;
   *         negative value if it's not possible to perform the calculation or if there are no unmatched left braces before
   *         the given offset
   */
  protected static int getUnmatchedLBracesNumberBefore(Editor editor, int offset, FileType fileType) {
    if (offset == 0) {
      return -1;
    }
    CharSequence chars = editor.getDocument().getCharsSequence();
    if (chars.charAt(offset - 1) != '{') {
      return -1;
    }

    EditorHighlighter highlighter = ((EditorEx)editor).getHighlighter();
    HighlighterIterator iterator = highlighter.createIterator(offset - 1);
    BraceMatcher braceMatcher = BraceMatchingUtil.getBraceMatcher(fileType, iterator);
    // only structural left braces of the language at the caret are considered
    if (!braceMatcher.isLBraceToken(iterator, chars, fileType) || !braceMatcher.isStructuralBrace(iterator, chars, fileType)) {
      return -1;
    }
    Language language = iterator.getTokenType().getLanguage();

    // scan the whole document, counting structural braces before/after the offset separately
    iterator = highlighter.createIterator(0);
    int lBracesBeforeOffset = 0;
    int lBracesAfterOffset = 0;
    int rBracesBeforeOffset = 0;
    int rBracesAfterOffset = 0;
    for (; !iterator.atEnd(); iterator.advance()) {
      IElementType tokenType = iterator.getTokenType();
      if (!tokenType.getLanguage().equals(language) || !braceMatcher.isStructuralBrace(iterator, chars, fileType)) {
        continue;
      }

      boolean beforeOffset = iterator.getStart() < offset;

      if (braceMatcher.isLBraceToken(iterator, chars, fileType)) {
        if (beforeOffset) {
          lBracesBeforeOffset++;
        }
        else {
          lBracesAfterOffset++;
        }
      }
      else if (braceMatcher.isRBraceToken(iterator, chars, fileType)) {
        if (beforeOffset) {
          rBracesBeforeOffset++;
        }
        else {
          rBracesAfterOffset++;
        }
      }
    }

    return lBracesBeforeOffset - rBracesBeforeOffset - (rBracesAfterOffset - lBracesAfterOffset);
  }
}
| platform/lang-impl/src/com/intellij/codeInsight/editorActions/enter/EnterAfterUnmatchedBraceHandler.java | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.editorActions.enter;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.highlighting.BraceMatcher;
import com.intellij.codeInsight.highlighting.BraceMatchingUtil;
import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.TokenType;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
public class EnterAfterUnmatchedBraceHandler extends EnterHandlerDelegateAdapter {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.editorActions.enter.EnterAfterUnmatchedBraceHandler");
  /**
   * Inserts the missing '}' characters (if any are needed at the caret position) and asks the
   * platform to finish the Enter processing with forced indent; otherwise falls through to the
   * default Enter handling.
   */
  @Override
  public Result preprocessEnter(@NotNull final PsiFile file,
                                @NotNull final Editor editor,
                                @NotNull final Ref<Integer> caretOffsetRef,
                                @NotNull final Ref<Integer> caretAdvance,
                                @NotNull final DataContext dataContext,
                                final EditorActionHandler originalHandler) {
    int caretOffset = caretOffsetRef.get();
    if (!isApplicable(file, caretOffset)) {
      return Result.Continue;
    }
    int maxRBraceCount = getMaxRBraceCount(file, editor, caretOffset);
    if (maxRBraceCount > 0) {
      insertRBraces(file, editor,
                    caretOffset,
                    getRBraceOffset(file, editor, caretOffset),
                    adjustRBraceCountForPosition(editor, caretOffset, maxRBraceCount));
      return Result.DefaultForceIndent;
    }
    return Result.Continue;
  }
  /**
   * Checks that the text context is in responsibility of the handler. The default implementation
   * accepts every context; language-specific subclasses are expected to narrow it.
   *
   * @param file        target PSI file
   * @param caretOffset target caret offset
   * @return true, if handler is in charge
   */
  public boolean isApplicable(@NotNull PsiFile file, int caretOffset) {
    return true;
  }
  /**
   * Calculates the maximum number of '}' that can be inserted by handler.
   * Can return <code>0</code> or less in custom implementation to skip '}' insertion in the <code>preprocessEnter</code> call
   * and switch to default implementation.
   *
   * @param file        target PSI file
   * @param editor      target editor
   * @param caretOffset target caret offset
   * @return maximum number of '}' that can be inserted by handler, <code>0</code> or less to switch to default implementation
   */
  protected int getMaxRBraceCount(@NotNull final PsiFile file, @NotNull final Editor editor, int caretOffset) {
    // the user can disable automatic '}' insertion via the code-insight settings
    if (!CodeInsightSettings.getInstance().INSERT_BRACE_ON_ENTER) {
      return 0;
    }
    return Math.max(0, getUnmatchedLBracesNumberBefore(editor, caretOffset, file.getFileType()));
  }
/**
* Calculates the string of '}' that have to be inserted by handler.
* Some languages can expand the string by additional characters (i.e. '\', ';')
*
* @param editor target editor
* @param caretOffset target caret offset
* @param maxRBraceCount the maximum number of '}' for insert at position, it always positive
* @return the string of '}' that has to be inserted by handler, it must have at least one '}'
*/
protected String adjustRBraceCountForPosition(@NotNull final Editor editor, int caretOffset, int maxRBraceCount) {
assert maxRBraceCount > 0;
CharSequence text = editor.getDocument().getCharsSequence();
int bracesToInsert = 0;
for (int i = caretOffset - 1; i >= 0 && bracesToInsert < maxRBraceCount; --i) {
final char c = text.charAt(i);
if (c == '{') {
++bracesToInsert;
}
else if (isStopChar(c)) {
break;
}
}
return StringUtil.repeatSymbol('}', Math.max(bracesToInsert, 1));
}
/**
* Checks the character before the inserted '}' to reduce the count of inserted '}'.
* The number of inserted '}' will increase for each found '{'.
*
* @param c character to check
* @return true, to stop back iteration
*/
protected boolean isStopChar(char c) {
return " \n\t".indexOf(c) < 0;
}
  /**
   * Calculates the position for insertion of one or more '}'.
   *
   * @param file        target PSI file
   * @param editor      target editor
   * @param caretOffset target caret offset
   * @return the position between <code>caretOffset</code> and the end of file
   */
  protected int getRBraceOffset(@NotNull final PsiFile file, @NotNull final Editor editor, int caretOffset) {
    CharSequence text = editor.getDocument().getCharsSequence();
    int offset = CharArrayUtil.shiftForward(text, caretOffset, " \t");
    final int fileLength = text.length();
    if (offset < fileLength && ")];,%<?".indexOf(text.charAt(offset)) < 0) {
      // extend the insertion point to the end of the PSI element starting at the caret
      // (it may span several lines, see calculateOffsetToInsertClosingBrace())
      offset = calculateOffsetToInsertClosingBrace(file, text, offset).second;
    }
    return Math.min(offset, fileLength);
  }
/**
* Inserts the <code>generatedRBraces</code> at the <code>rBracesInsertOffset</code> position and formats the code block.
* @param file target PSI file
* @param editor target editor
* @param caretOffset target caret offset
* @param rBracesInsertOffset target position to insert
* @param generatedRBraces string of '}' to insert
*/
protected void insertRBraces(@NotNull PsiFile file,
@NotNull Editor editor,
int caretOffset,
int rBracesInsertOffset,
String generatedRBraces) {
final Document document = editor.getDocument();
insertRBracesAtPosition(document, caretOffset, rBracesInsertOffset, generatedRBraces);
formatCodeFragmentBetweenBraces(file, document, caretOffset, rBracesInsertOffset, generatedRBraces);
}
  /**
   * Inserts the <code>rBracesCount</code> of '}' at the <code>rBracesInsertOffset</code> position.
   * Also inserts a temporary line feed at the caret (see the comment below); it is removed again
   * in {@link #formatCodeFragmentBetweenBraces} after indents have been adjusted.
   *
   * @param document            target document
   * @param caretOffset         target caret offset
   * @param rBracesInsertOffset target position to insert
   * @param generatedRBraces    string of '}' to insert
   */
  protected void insertRBracesAtPosition(Document document, int caretOffset, int rBracesInsertOffset, String generatedRBraces) {
    document.insertString(rBracesInsertOffset, "\n" + generatedRBraces);
    // We need to adjust indents of the text that will be moved, hence, need to insert preliminary line feed.
    // Example:
    //     if (test1()) {
    //     } else {<caret> if (test2()) {
    //     foo();
    //     }
    // We insert here '\n}' after 'foo();' and have the following:
    //     if (test1()) {
    //     } else { if (test2()) {
    //         foo();
    //     }
    // }
    // That is formatted incorrectly because line feed between 'else' and 'if' is not inserted yet (whole 'if' block is indent anchor
    // to 'if' code block('{}')). So, we insert temporary line feed between 'if' and 'else', correct indent and remove that temporary
    // line feed.
    document.insertString(caretOffset, "\n");
  }
  /**
   * Formats the code block between caret and inserted braces.
   *
   * @param file                target PSI file
   * @param document            target document
   * @param caretOffset         target caret offset
   * @param rBracesInsertOffset target position to insert
   * @param generatedRBraces    string of '}' to insert
   */
  protected void formatCodeFragmentBetweenBraces(@NotNull PsiFile file,
                                                 @NotNull Document document,
                                                 int caretOffset,
                                                 int rBracesInsertOffset,
                                                 String generatedRBraces) {
    Project project = file.getProject();
    long stamp = document.getModificationStamp();
    boolean closingBraceIndentAdjusted;
    try {
      PsiDocumentManager.getInstance(project).commitDocument(document);
      CodeStyleManager.getInstance(project).adjustLineIndent(file, new TextRange(caretOffset, rBracesInsertOffset + 2));
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
    finally {
      // an unchanged modification stamp means the formatter did not touch the document
      closingBraceIndentAdjusted = stamp != document.getModificationStamp();
      // do you remember that we insert the '\n'? here we take it back!
      document.deleteString(caretOffset, caretOffset + 1);
    }

    // There is a possible case that formatter was unable to adjust line indent for the closing brace (that is the case for plain text
    // document for example). Hence, we're trying to do that manually by copying the indent of the line that holds the brace.
    if (!closingBraceIndentAdjusted) {
      int line = document.getLineNumber(rBracesInsertOffset);
      StringBuilder buffer = new StringBuilder();
      int start = document.getLineStartOffset(line);
      int end = document.getLineEndOffset(line);
      final CharSequence text = document.getCharsSequence();
      for (int i = start; i < end; i++) {
        char c = text.charAt(i);
        if (c != ' ' && c != '\t') {
          break;
        }
        else {
          buffer.append(c);
        }
      }
      if (buffer.length() > 0) {
        document.insertString(rBracesInsertOffset + 1, buffer);
      }
    }
  }
/**
* Current handler inserts closing curly brace (right brace) if necessary. There is a possible case that it should be located
* more than one line forward.
* <p/>
* <b>Example</b>
* <pre>
* if (test1()) {
* } else {<caret> if (test2()) {
* foo();
* }
* </pre>
* <p/>
* We want to get this after the processing:
* <pre>
* if (test1()) {
* } else {
* if (test2()) {
* foo();
* }
* }
* </pre>
* I.e. closing brace should be inserted two lines below current caret line. Hence, we need to calculate correct offset
* to use for brace inserting. This method is responsible for that.
* <p/>
* In essence it inspects PSI structure and finds PSE elements with the max length that starts at caret offset. End offset
* of that element is used as an insertion point.
*
* @param file target PSI file
* @param text text from the given file
* @param offset target offset where line feed will be inserted
* @return pair of (element, offset). The element is the '}' owner, if applicable; the offset is the position for inserting closing brace
*/
protected Pair<PsiElement, Integer> calculateOffsetToInsertClosingBrace(@NotNull PsiFile file, @NotNull CharSequence text, final int offset) {
PsiElement element = PsiUtilCore.getElementAtOffset(file, offset);
ASTNode node = element.getNode();
if (node != null && node.getElementType() == TokenType.WHITE_SPACE) {
return Pair.create(null, CharArrayUtil.shiftForwardUntil(text, offset, "\n"));
}
for (PsiElement parent = element.getParent(); parent != null; parent = parent.getParent()) {
ASTNode parentNode = parent.getNode();
if (parentNode == null || parentNode.getStartOffset() != offset) {
break;
}
element = parent;
}
if (element.getTextOffset() != offset) {
return Pair.create(null, CharArrayUtil.shiftForwardUntil(text, offset, "\n"));
}
return Pair.create(element, element.getTextRange().getEndOffset());
}
public static boolean isAfterUnmatchedLBrace(Editor editor, int offset, FileType fileType) {
return getUnmatchedLBracesNumberBefore(editor, offset, fileType) > 0;
}
/**
* Calculates number of unmatched left braces before the given offset.
*
* @param editor target editor
* @param offset target offset
* @param fileType target file type
* @return number of unmatched braces before the given offset;
* negative value if it's not possible to perform the calculation or if there are no unmatched left braces before
* the given offset
*/
protected static int getUnmatchedLBracesNumberBefore(Editor editor, int offset, FileType fileType) {
if (offset == 0) {
return -1;
}
CharSequence chars = editor.getDocument().getCharsSequence();
if (chars.charAt(offset - 1) != '{') {
return -1;
}
EditorHighlighter highlighter = ((EditorEx)editor).getHighlighter();
HighlighterIterator iterator = highlighter.createIterator(offset - 1);
BraceMatcher braceMatcher = BraceMatchingUtil.getBraceMatcher(fileType, iterator);
if (!braceMatcher.isLBraceToken(iterator, chars, fileType) || !braceMatcher.isStructuralBrace(iterator, chars, fileType)) {
return -1;
}
Language language = iterator.getTokenType().getLanguage();
iterator = highlighter.createIterator(0);
int lBracesBeforeOffset = 0;
int lBracesAfterOffset = 0;
int rBracesBeforeOffset = 0;
int rBracesAfterOffset = 0;
for (; !iterator.atEnd(); iterator.advance()) {
IElementType tokenType = iterator.getTokenType();
if (!tokenType.getLanguage().equals(language) || !braceMatcher.isStructuralBrace(iterator, chars, fileType)) {
continue;
}
boolean beforeOffset = iterator.getStart() < offset;
if (braceMatcher.isLBraceToken(iterator, chars, fileType)) {
if (beforeOffset) {
lBracesBeforeOffset++;
}
else {
lBracesAfterOffset++;
}
}
else if (braceMatcher.isRBraceToken(iterator, chars, fileType)) {
if (beforeOffset) {
rBracesBeforeOffset++;
}
else {
rBracesAfterOffset++;
}
}
}
return lBracesBeforeOffset - rBracesBeforeOffset - (rBracesAfterOffset - lBracesAfterOffset);
}
}
| CPP-4184 Generate multiple '}' on enter for code with several '{' in one line (method rename)
| platform/lang-impl/src/com/intellij/codeInsight/editorActions/enter/EnterAfterUnmatchedBraceHandler.java | CPP-4184 Generate multiple '}' on enter for code with several '{' in one line (method rename) |
|
Java | apache-2.0 | 6c84356278bb55c067ae0f336cd18cc6d914faa0 | 0 | Flowdalic/Smack,Flowdalic/Smack,igniterealtime/Smack,vanitasvitae/Smack,Flowdalic/Smack,vanitasvitae/Smack,igniterealtime/Smack,vanitasvitae/Smack,igniterealtime/Smack | /**
*
* Copyright 2015-2020 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.igniterealtime.smack.inttest;
import static org.reflections.ReflectionUtils.getAllMethods;
import static org.reflections.ReflectionUtils.withAnnotation;
import static org.reflections.ReflectionUtils.withModifier;
import static org.reflections.ReflectionUtils.withParametersCount;
import static org.reflections.ReflectionUtils.withReturnType;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jivesoftware.smack.AbstractXMPPConnection;
import org.jivesoftware.smack.ConnectionConfiguration.SecurityMode;
import org.jivesoftware.smack.SmackConfiguration;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NoResponseException;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.tcp.XMPPTCPConnectionConfiguration;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smack.util.TLSUtils;
import org.jivesoftware.smack.util.dns.dnsjava.DNSJavaResolver;
import org.jivesoftware.smack.util.dns.javax.JavaxResolver;
import org.jivesoftware.smack.util.dns.minidns.MiniDnsResolver;
import org.jivesoftware.smackx.debugger.EnhancedDebuggerWindow;
import org.jivesoftware.smackx.iqregister.AccountManager;
import org.igniterealtime.smack.inttest.Configuration.AccountRegistration;
import org.igniterealtime.smack.inttest.annotations.AfterClass;
import org.igniterealtime.smack.inttest.annotations.BeforeClass;
import org.igniterealtime.smack.inttest.annotations.SmackIntegrationTest;
import org.reflections.Reflections;
import org.reflections.scanners.MethodAnnotationsScanner;
import org.reflections.scanners.MethodParameterScanner;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
public class SmackIntegrationTestFramework {
    static {
        // Runs once on class load: make sure the default trust store type is set to JKS where
        // required, before any test connection is configured.
        TLSUtils.setDefaultTrustStoreTypeToJksIfRequired();
    }

    private static final Logger LOGGER = Logger.getLogger(SmackIntegrationTestFramework.class.getName());

    // Set to true by the framework's own unit tests so that pseudo integration tests from
    // "org.igniterealtime.smack.inttest.unittest" are not skipped (see runTests()).
    public static boolean SINTTEST_UNIT_TEST = false;

    // The configuration this framework instance was created with.
    protected final Configuration config;

    // Result collector of the current test run; created at the beginning of run().
    protected TestRunResult testRunResult;

    // The prepared test environment; assigned in run() via prepareEnvironment().
    private SmackIntegrationTestEnvironment environment;

    // Manages the XMPP connections used by the tests; created in run() after the test run id exists.
    protected XmppConnectionManager connectionManager;
public enum TestType {
Normal,
LowLevel,
SpecificLowLevel,
}
public static void main(String[] args) throws IOException, KeyManagementException,
NoSuchAlgorithmException, SmackException, XMPPException, InterruptedException, InstantiationException,
IllegalAccessException, IllegalArgumentException, InvocationTargetException {
Configuration config = Configuration.newConfiguration(args);
SmackIntegrationTestFramework sinttest = new SmackIntegrationTestFramework(config);
TestRunResult testRunResult = sinttest.run();
for (Entry<Class<? extends AbstractSmackIntTest>, Throwable> entry : testRunResult.impossibleTestClasses.entrySet()) {
LOGGER.info("Could not run " + entry.getKey().getName() + " because: "
+ entry.getValue().getLocalizedMessage());
}
for (TestNotPossible testNotPossible : testRunResult.impossibleIntegrationTests) {
LOGGER.info("Could not run " + testNotPossible.concreteTest + " because: "
+ testNotPossible.testNotPossibleException.getMessage());
}
for (SuccessfulTest successfulTest : testRunResult.successfulIntegrationTests) {
LOGGER.info(successfulTest.concreteTest + " ✔");
}
final int successfulTests = testRunResult.successfulIntegrationTests.size();
final int failedTests = testRunResult.failedIntegrationTests.size();
final int availableTests = testRunResult.getNumberOfAvailableTests();
LOGGER.info("SmackIntegrationTestFramework[" + testRunResult.testRunId + ']' + " finished: "
+ successfulTests + '/' + availableTests + " [" + failedTests + " failed]");
final int exitStatus;
if (failedTests > 0) {
LOGGER.warning("�� The following " + failedTests + " tests failed! ��");
for (FailedTest failedTest : testRunResult.failedIntegrationTests) {
final Throwable cause = failedTest.failureReason;
LOGGER.log(Level.SEVERE, failedTest.concreteTest + " failed: " + cause, cause);
}
exitStatus = 2;
} else {
LOGGER.info("All possible Smack Integration Tests completed successfully. \\o/");
exitStatus = 0;
}
switch (config.debugger) {
case enhanced:
EnhancedDebuggerWindow.getInstance().waitUntilClosed();
break;
default:
break;
}
System.exit(exitStatus);
}
    /**
     * Construct a new Smack integration test framework instance.
     *
     * @param configuration the configuration to use for the test run.
     */
    public SmackIntegrationTestFramework(Configuration configuration) {
        this.config = configuration;
    }
    /**
     * Execute a complete integration test run.
     * <p>
     * Installs the configured DNS resolver (a global setting), creates the {@link TestRunResult}
     * and the connection manager, applies debugger/timeout/registration related configuration,
     * scans the configured packages for non-abstract test classes, prepares the environment and
     * runs the tests. Connections and accounts are cleaned up afterwards in any case.
     * </p>
     *
     * @return the result of this test run.
     * @throws SmackException if a Smack related error occurs.
     * @throws XMPPException if an XMPP protocol error occurs.
     * @throws IOException if an I/O error occurs.
     * @throws InterruptedException if the calling thread was interrupted.
     */
    public synchronized TestRunResult run()
                    throws KeyManagementException, NoSuchAlgorithmException, SmackException, IOException, XMPPException,
                    InterruptedException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        // The DNS resolver is not really a per sinttest run setting. It is not even a per connection setting. Instead
        // it is a global setting, but we treat it like a per sinttest run setting.
        switch (config.dnsResolver) {
        case minidns:
            MiniDnsResolver.setup();
            break;
        case javax:
            JavaxResolver.setup();
            break;
        case dnsjava:
            DNSJavaResolver.setup();
            break;
        }
        testRunResult = new TestRunResult();

        // Create a connection manager *after* we created the testRunId (in testRunResult).
        this.connectionManager = new XmppConnectionManager(this);

        LOGGER.info("SmackIntegrationTestFramework [" + testRunResult.testRunId + ']' + ": Starting\nSmack version: " + SmackConfiguration.getVersion());
        if (config.debugger != Configuration.Debugger.none) {
            // JUL Debugger will not print any information until configured to print log messages of
            // level FINE
            // TODO configure JUL for log?
            SmackConfiguration.addDisabledSmackClass("org.jivesoftware.smack.debugger.JulDebugger");
            SmackConfiguration.DEBUG = true;
        }
        if (config.replyTimeout > 0) {
            SmackConfiguration.setDefaultReplyTimeout(config.replyTimeout);
        }
        if (config.securityMode != SecurityMode.required && config.accountRegistration == AccountRegistration.inBandRegistration) {
            // Allow sensitive operations (in-band account registration) over insecure connections
            // for this run.
            AccountManager.sensitiveOperationOverInsecureConnectionDefault(true);
        }
        // TODO print effective configuration

        // Determine the packages to scan for test classes; fall back to the Smack defaults.
        String[] testPackages;
        if (config.testPackages == null || config.testPackages.isEmpty()) {
            testPackages = new String[] { "org.jivesoftware.smackx", "org.jivesoftware.smack" };
        }
        else {
            testPackages = config.testPackages.toArray(new String[config.testPackages.size()]);
        }
        Reflections reflections = new Reflections(testPackages, new SubTypesScanner(),
                        new TypeAnnotationsScanner(), new MethodAnnotationsScanner(), new MethodParameterScanner());
        Set<Class<? extends AbstractSmackIntegrationTest>> inttestClasses = reflections.getSubTypesOf(AbstractSmackIntegrationTest.class);
        Set<Class<? extends AbstractSmackLowLevelIntegrationTest>> lowLevelInttestClasses = reflections.getSubTypesOf(AbstractSmackLowLevelIntegrationTest.class);

        Set<Class<? extends AbstractSmackIntTest>> classes = new HashSet<>(inttestClasses.size()
                        + lowLevelInttestClasses.size());
        classes.addAll(inttestClasses);
        classes.addAll(lowLevelInttestClasses);

        {
            // Remove all abstract classes.
            // TODO: This may be a good candidate for Java stream filtering once Smack is Android API 24 or higher.
            Iterator<Class<? extends AbstractSmackIntTest>> it = classes.iterator();
            while (it.hasNext()) {
                Class<? extends AbstractSmackIntTest> clazz = it.next();
                if (Modifier.isAbstract(clazz.getModifiers())) {
                    it.remove();
                }
            }
        }

        if (classes.isEmpty()) {
            throw new IllegalStateException("No test classes found");
        }

        LOGGER.info("SmackIntegrationTestFramework [" + testRunResult.testRunId
                        + "]: Finished scanning for tests, preparing environment");
        environment = prepareEnvironment();

        try {
            runTests(classes);
        }
        catch (Throwable t) {
            // Log the thrown Throwable to prevent it being shadowed in case the finally block below also throws.
            LOGGER.log(Level.SEVERE, "Unexpected abort because runTests() threw throwable", t);
            throw t;
        }
        finally {
            // Ensure that the accounts are deleted and disconnected before we continue
            connectionManager.disconnectAndCleanup();
        }
        return testRunResult;
    }
@SuppressWarnings({"Finally"})
private void runTests(Set<Class<? extends AbstractSmackIntTest>> classes)
throws InterruptedException, InstantiationException, IllegalAccessException,
IllegalArgumentException, SmackException, IOException, XMPPException {
List<PreparedTest> tests = new ArrayList<>(classes.size());
int numberOfAvailableTests = 0;
for (Class<? extends AbstractSmackIntTest> testClass : classes) {
final String testClassName = testClass.getName();
// TODO: Move the whole "skipping section" below one layer up?
// Skip pseudo integration tests from src/test
// Although Smack's gradle build files do not state that the 'main' sources classpath also contains the
// 'test' classes. Some IDEs like Eclipse include them. As result, a real integration test run encounters
// pseudo integration tests like the DummySmackIntegrationTest which always throws from src/test.
// It is unclear why this apparently does not happen in the 4.3 branch, one likely cause is
// compile project(path: ":smack-omemo", configuration: "testRuntime")
// in
// smack-integration-test/build.gradle:17
// added after 4.3 was branched out with
// 1f731f6318785a84b9741280d586a61dc37ecb2e
// Now "gradle integrationTest" appear to be never affected by this, i.e., they are executed with the
// correct classpath. Plain Eclipse, i.e. Smack imported into Eclipse after "gradle eclipse", appear
// to include *all* classes. Which means those runs sooner or later try to execute
// DummySmackIntegrationTest. Eclipse with buildship, the gradle plugin for Eclipse, always excludes
// *all* src/test classes, which means they do not encounter DummySmackIntegrationTest, but this means
// that the "compile project(path: ":smack-omemo", configuration: "testRuntime")" is not respected,
// which leads to
// Exception in thread "main" java.lang.NoClassDefFoundError: org/jivesoftware/smack/test/util/FileTestUtil
// at org.jivesoftware.smackx.ox.OXSecretKeyBackupIntegrationTest.<clinit>(OXSecretKeyBackupIntegrationTest.java:66)
// See
// - https://github.com/eclipse/buildship/issues/354 (Remove test dependencies from runtime classpath)
// - https://bugs.eclipse.org/bugs/show_bug.cgi?id=482315 (Runtime classpath includes test dependencies)
// - https://discuss.gradle.org/t/main-vs-test-compile-vs-runtime-classpaths-in-eclipse-once-and-for-all-how/17403
// - https://bugs.eclipse.org/bugs/show_bug.cgi?id=376616 (Scope of dependencies has no effect on Eclipse compilation)
if (!SINTTEST_UNIT_TEST && testClassName.startsWith("org.igniterealtime.smack.inttest.unittest")) {
LOGGER.warning("Skipping integration test '" + testClassName + "' from src/test classpath (should not be in classpath)");
continue;
}
if (config.enabledTests != null && !isInSet(testClass, config.enabledTests)) {
DisabledTestClass disabledTestClass = new DisabledTestClass(testClass, "Skipping test class " + testClassName + " because it is not enabled");
testRunResult.disabledTestClasses.add(disabledTestClass);
continue;
}
if (isInSet(testClass, config.disabledTests)) {
DisabledTestClass disabledTestClass = new DisabledTestClass(testClass, "Skipping test class " + testClassName + " because it is disalbed");
testRunResult.disabledTestClasses.add(disabledTestClass);
continue;
}
final Constructor<? extends AbstractSmackIntTest> cons;
try {
cons = testClass.getConstructor(SmackIntegrationTestEnvironment.class);
}
catch (NoSuchMethodException | SecurityException e) {
throw new IllegalArgumentException(
"Smack Integration Test class does not declare the correct constructor. Is a public Constructor(SmackIntegrationTestEnvironment) missing?",
e);
}
final List<Method> smackIntegrationTestMethods;
{
Method[] testClassMethods = testClass.getMethods();
smackIntegrationTestMethods = new ArrayList<>(testClassMethods.length);
for (Method method : testClassMethods) {
if (!method.isAnnotationPresent(SmackIntegrationTest.class)) {
continue;
}
smackIntegrationTestMethods.add(method);
}
}
if (smackIntegrationTestMethods.isEmpty()) {
LOGGER.warning("No Smack integration test methods found in " + testClass);
continue;
}
final AbstractSmackIntTest test;
try {
test = cons.newInstance(environment);
}
catch (InvocationTargetException e) {
Throwable cause = e.getCause();
throwFatalException(cause);
testRunResult.impossibleTestClasses.put(testClass, cause);
continue;
}
Class<? extends AbstractXMPPConnection> specificLowLevelConnectionClass = null;
final TestType testType;
if (test instanceof AbstractSmackSpecificLowLevelIntegrationTest) {
AbstractSmackSpecificLowLevelIntegrationTest<?> specificLowLevelTest = (AbstractSmackSpecificLowLevelIntegrationTest<?>) test;
specificLowLevelConnectionClass = specificLowLevelTest.getConnectionClass();
testType = TestType.SpecificLowLevel;
} else if (test instanceof AbstractSmackLowLevelIntegrationTest) {
testType = TestType.LowLevel;
} else if (test instanceof AbstractSmackIntegrationTest) {
testType = TestType.Normal;
} else {
throw new AssertionError();
}
// Verify the method signatures, throw in case a signature is incorrect.
for (Method method : smackIntegrationTestMethods) {
Class<?> retClass = method.getReturnType();
if (!retClass.equals(Void.TYPE)) {
throw new IllegalStateException(
"SmackIntegrationTest annotation on" + method + " that does not return void");
}
switch (testType) {
case Normal:
final Class<?>[] parameterTypes = method.getParameterTypes();
if (parameterTypes.length > 0) {
throw new IllegalStateException(
"SmackIntegrationTest annotaton on " + method + " that takes arguments ");
}
break;
case LowLevel:
verifyLowLevelTestMethod(method, AbstractXMPPConnection.class);
break;
case SpecificLowLevel:
verifyLowLevelTestMethod(method, specificLowLevelConnectionClass);
break;
}
}
Iterator<Method> it = smackIntegrationTestMethods.iterator();
while (it.hasNext()) {
final Method method = it.next();
final String methodName = method.getName();
if (config.enabledTests != null && !(config.enabledTests.contains(methodName)
|| isInSet(testClass, config.enabledTests))) {
DisabledTest disabledTest = new DisabledTest(method, "Skipping test method " + methodName + " because it is not enabled");
testRunResult.disabledTests.add(disabledTest);
it.remove();
continue;
}
if (config.disabledTests != null && config.disabledTests.contains(methodName)) {
DisabledTest disabledTest = new DisabledTest(method, "Skipping test method " + methodName + " because it is disabled");
testRunResult.disabledTests.add(disabledTest);
it.remove();
continue;
}
}
if (smackIntegrationTestMethods.isEmpty()) {
LOGGER.info("All tests in " + testClassName + " are disabled");
continue;
}
List<ConcreteTest> concreteTests = new ArrayList<>(smackIntegrationTestMethods.size());
for (Method testMethod : smackIntegrationTestMethods) {
switch (testType) {
case Normal: {
ConcreteTest.Executor concreteTestExecutor = () -> testMethod.invoke(test);
ConcreteTest concreteTest = new ConcreteTest(testType, testMethod, concreteTestExecutor);
concreteTests.add(concreteTest);
}
break;
case LowLevel:
case SpecificLowLevel:
LowLevelTestMethod lowLevelTestMethod = new LowLevelTestMethod(testMethod);
switch (testType) {
case LowLevel:
List<ConcreteTest> concreteLowLevelTests = invokeLowLevel(lowLevelTestMethod, (AbstractSmackLowLevelIntegrationTest) test);
concreteTests.addAll(concreteLowLevelTests);
break;
case SpecificLowLevel: {
ConcreteTest.Executor concreteTestExecutor = () -> invokeSpecificLowLevel(
lowLevelTestMethod, (AbstractSmackSpecificLowLevelIntegrationTest<?>) test);
ConcreteTest concreteTest = new ConcreteTest(testType, testMethod, concreteTestExecutor);
concreteTests.add(concreteTest);
break;
}
default:
throw new AssertionError();
}
break;
}
}
// Instantiate the prepared test early as this will check the before and after class annotations.
PreparedTest preparedTest = new PreparedTest(test, concreteTests);
tests.add(preparedTest);
numberOfAvailableTests += concreteTests.size();
}
// Print status information.
StringBuilder sb = new StringBuilder(1024);
sb.append("Smack Integration Test Framework\n");
sb.append("################################\n");
if (config.verbose) {
sb.append('\n');
if (!testRunResult.disabledTestClasses.isEmpty()) {
sb.append("The following test classes are disabled:\n");
for (DisabledTestClass disabledTestClass : testRunResult.disabledTestClasses) {
disabledTestClass.appendTo(sb).append('\n');
}
}
if (!testRunResult.disabledTests.isEmpty()) {
sb.append("The following tests are disabled:\n");
for (DisabledTest disabledTest : testRunResult.disabledTests) {
disabledTest.appendTo(sb).append('\n');
}
}
sb.append('\n');
}
sb.append("Available tests: ").append(numberOfAvailableTests)
.append("(#-classes: ").append(testRunResult.disabledTestClasses.size())
.append(", #-tests: ").append(testRunResult.disabledTests.size())
.append(")\n");
LOGGER.info(sb.toString());
for (PreparedTest test : tests) {
test.run();
}
// Assert that all tests in the 'tests' list produced a result.
assert numberOfAvailableTests == testRunResult.getNumberOfAvailableTests();
}
private void runConcreteTest(ConcreteTest concreteTest)
throws InterruptedException, XMPPException, IOException, SmackException {
LOGGER.info(concreteTest + " Start");
long testStart = System.currentTimeMillis();
try {
concreteTest.executor.execute();
long testEnd = System.currentTimeMillis();
LOGGER.info(concreteTest + " Success");
testRunResult.successfulIntegrationTests.add(new SuccessfulTest(concreteTest, testStart, testEnd, null));
}
catch (InvocationTargetException e) {
long testEnd = System.currentTimeMillis();
Throwable cause = e.getCause();
if (cause instanceof TestNotPossibleException) {
LOGGER.info(concreteTest + " is not possible");
testRunResult.impossibleIntegrationTests.add(new TestNotPossible(concreteTest, testStart, testEnd,
null, (TestNotPossibleException) cause));
return;
}
Throwable nonFatalFailureReason;
// junit assert's throw an AssertionError if they fail, those should not be
// thrown up, as it would be done by throwFatalException()
if (cause instanceof AssertionError) {
nonFatalFailureReason = cause;
} else {
nonFatalFailureReason = throwFatalException(cause);
}
// An integration test failed
testRunResult.failedIntegrationTests.add(new FailedTest(concreteTest, testStart, testEnd, null,
nonFatalFailureReason));
LOGGER.log(Level.SEVERE, concreteTest + " Failed", e);
}
catch (IllegalArgumentException | IllegalAccessException e) {
throw new AssertionError(e);
}
}
private static void verifyLowLevelTestMethod(Method method,
Class<? extends AbstractXMPPConnection> connectionClass) {
if (!testMethodParametersIsListOfConnections(method, connectionClass)
&& !testMethodParametersVarargsConnections(method, connectionClass)) {
throw new IllegalArgumentException(method + " is not a valid low level test method");
}
}
private List<ConcreteTest> invokeLowLevel(LowLevelTestMethod lowLevelTestMethod, AbstractSmackLowLevelIntegrationTest test) {
Collection<? extends XmppConnectionDescriptor<?, ?, ?>> connectionDescriptors;
if (lowLevelTestMethod.smackIntegrationTestAnnotation.onlyDefaultConnectionType()) {
XmppConnectionDescriptor<?, ?, ?> defaultConnectionDescriptor = connectionManager.getDefaultConnectionDescriptor();
connectionDescriptors = Collections.singleton(defaultConnectionDescriptor);
} else {
connectionDescriptors = connectionManager.getConnectionDescriptors();
}
List<ConcreteTest> resultingConcreteTests = new ArrayList<>(connectionDescriptors.size());
for (XmppConnectionDescriptor<?, ?, ?> connectionDescriptor : connectionDescriptors) {
String connectionNick = connectionDescriptor.getNickname();
if (config.enabledConnections != null && !config.enabledConnections.contains(connectionNick)) {
DisabledTest disabledTest = new DisabledTest(lowLevelTestMethod.testMethod, "Not creating test for " + lowLevelTestMethod + " with connection '" + connectionNick
+ "', as this connection type is not enabled");
testRunResult.disabledTests.add(disabledTest);
continue;
}
if (config.disabledConnections != null && config.disabledConnections.contains(connectionNick)) {
DisabledTest disabledTest = new DisabledTest(lowLevelTestMethod.testMethod, "Not creating test for " + lowLevelTestMethod + " with connection '" + connectionNick
+ ", as this connection type is disabled");
testRunResult.disabledTests.add(disabledTest);
continue;
}
Class<? extends AbstractXMPPConnection> connectionClass = connectionDescriptor.getConnectionClass();
ConcreteTest.Executor executor = () -> lowLevelTestMethod.invoke(test, connectionClass);
ConcreteTest concreteTest = new ConcreteTest(TestType.LowLevel, lowLevelTestMethod.testMethod, executor, connectionClass.getSimpleName());
resultingConcreteTests.add(concreteTest);
}
return resultingConcreteTests;
}
private static <C extends AbstractXMPPConnection> void invokeSpecificLowLevel(LowLevelTestMethod testMethod,
AbstractSmackSpecificLowLevelIntegrationTest<C> test)
throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, InterruptedException,
SmackException, IOException, XMPPException {
if (testMethod.smackIntegrationTestAnnotation.onlyDefaultConnectionType()) {
throw new IllegalArgumentException("SpecificLowLevelTests must not have set onlyDefaultConnectionType");
}
Class<C> connectionClass = test.getConnectionClass();
testMethod.invoke(test, connectionClass);
}
    /**
     * Prepare the integration test environment (connections and accounts) by delegating to the
     * connection manager.
     *
     * @return the prepared environment.
     * @throws SmackException if a Smack related error occurs.
     * @throws XMPPException if an XMPP protocol error occurs.
     * @throws IOException if an I/O error occurs.
     * @throws InterruptedException if the calling thread was interrupted.
     */
    protected SmackIntegrationTestEnvironment prepareEnvironment() throws SmackException,
                    IOException, XMPPException, InterruptedException, KeyManagementException,
                    NoSuchAlgorithmException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        return connectionManager.prepareEnvironment();
    }
enum AccountNum {
One,
Two,
Three,
}
static XMPPTCPConnectionConfiguration.Builder getConnectionConfigurationBuilder(Configuration config) {
XMPPTCPConnectionConfiguration.Builder builder = XMPPTCPConnectionConfiguration.builder();
config.configurationApplier.applyConfigurationTo(builder);
return builder;
}
private static Exception throwFatalException(Throwable e) throws Error, NoResponseException,
InterruptedException {
if (e instanceof InterruptedException) {
throw (InterruptedException) e;
}
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
if (e instanceof Error) {
throw (Error) e;
}
return (Exception) e;
}
private static boolean isInSet(Class<?> clz, Set<String> classes) {
if (classes == null) {
return false;
}
final String className = clz.getName();
final String unqualifiedClassName = clz.getSimpleName();
return classes.contains(className) || classes.contains(unqualifiedClassName);
}
    /**
     * The aggregated result of one integration test run: successful, failed, impossible and
     * disabled tests, keyed by a unique test run id.
     */
    public static final class TestRunResult {

        /**
         * A short String of lowercase characters and numbers used to identify a integration test
         * run. We use lowercase characters because this string will eventually be part of the
         * localpart of the used JIDs (and the localpart is case insensitive).
         */
        public final String testRunId = StringUtils.insecureRandomString(5).toLowerCase(Locale.US);

        // Tests that were executed and succeeded.
        private final List<SuccessfulTest> successfulIntegrationTests = Collections.synchronizedList(new LinkedList<SuccessfulTest>());

        // Tests that were executed and failed.
        private final List<FailedTest> failedIntegrationTests = Collections.synchronizedList(new LinkedList<FailedTest>());

        // Tests that threw a TestNotPossibleException when executed.
        private final List<TestNotPossible> impossibleIntegrationTests = Collections.synchronizedList(new LinkedList<TestNotPossible>());

        // TODO: Ideally there would only be a list of disabledTests, but since we do not process a disabled test class
        // any further, we can not determine the concrete disabled tests.
        private final List<DisabledTestClass> disabledTestClasses = Collections.synchronizedList(new ArrayList<>());
        private final List<DisabledTest> disabledTests = Collections.synchronizedList(new ArrayList<>());

        // Test classes whose instantiation threw, mapped to the cause of the failure.
        private final Map<Class<? extends AbstractSmackIntTest>, Throwable> impossibleTestClasses = new HashMap<>();

        TestRunResult() {
        }

        /**
         * Get the unique id of this test run.
         *
         * @return the test run id.
         */
        public String getTestRunId() {
            return testRunId;
        }

        /**
         * Get the number of tests that produced a result in this run, i.e. the sum of
         * successful, failed and impossible tests.
         *
         * @return the number of available tests.
         */
        public int getNumberOfAvailableTests() {
            return successfulIntegrationTests.size() + failedIntegrationTests.size() + impossibleIntegrationTests.size();
        }

        /**
         * Get the tests that ran successfully.
         *
         * @return an unmodifiable view of the successful tests.
         */
        public List<SuccessfulTest> getSuccessfulTests() {
            return Collections.unmodifiableList(successfulIntegrationTests);
        }

        /**
         * Get the tests that failed.
         *
         * @return an unmodifiable view of the failed tests.
         */
        public List<FailedTest> getFailedTests() {
            return Collections.unmodifiableList(failedIntegrationTests);
        }

        /**
         * Get the tests that were not possible to run.
         *
         * @return an unmodifiable view of the impossible tests.
         */
        public List<TestNotPossible> getNotPossibleTests() {
            return Collections.unmodifiableList(impossibleIntegrationTests);
        }

        /**
         * Get the test classes that could not be instantiated, mapped to the cause.
         *
         * @return an unmodifiable view of the impossible test classes.
         */
        public Map<Class<? extends AbstractSmackIntTest>, Throwable> getImpossibleTestClasses() {
            return Collections.unmodifiableMap(impossibleTestClasses);
        }
    }
    /**
     * A test class instance bundled with its concrete tests and its optional methods annotated
     * with {@link BeforeClass} and {@link AfterClass}.
     */
    final class PreparedTest {
        private final AbstractSmackIntTest test;
        private final List<ConcreteTest> concreteTests;

        // The @BeforeClass / @AfterClass annotated methods; null if the class declares none.
        private final Method beforeClassMethod;
        private final Method afterClassMethod;

        private PreparedTest(AbstractSmackIntTest test, List<ConcreteTest> concreteTests) {
            this.test = test;
            this.concreteTests = concreteTests;
            Class<? extends AbstractSmackIntTest> testClass = test.getClass();
            // Looking the methods up here already validates their signatures, see
            // getSinttestSpecialMethod() which throws on invalid or duplicate annotated methods.
            beforeClassMethod = getSinttestSpecialMethod(testClass, BeforeClass.class);
            afterClassMethod = getSinttestSpecialMethod(testClass, AfterClass.class);
        }

        /**
         * Run the @BeforeClass method (if any), then all concrete tests, and always run the
         * @AfterClass method (if any) afterwards.
         *
         * @throws InterruptedException if the calling thread was interrupted.
         */
        public void run() throws InterruptedException, XMPPException, IOException, SmackException {
            try {
                // Run the @BeforeClass methods (if any)
                executeSinttestSpecialMethod(beforeClassMethod);

                for (ConcreteTest concreteTest : concreteTests) {
                    runConcreteTest(concreteTest);
                }
            }
            finally {
                // Always run the @AfterClass method, even if a test threw.
                executeSinttestSpecialMethod(afterClassMethod);
            }
        }

        // Invoke a @BeforeClass/@AfterClass method on the test instance, logging any failure.
        private void executeSinttestSpecialMethod(Method method) {
            if (method == null) {
                // The test class does not declare this special method; nothing to do.
                return;
            }

            try {
                method.invoke(test);
            }
            catch (InvocationTargetException | IllegalAccessException e) {
                LOGGER.log(Level.SEVERE, "Exception executing " + method, e);
            }
            catch (IllegalArgumentException e) {
                // The method was verified to take no parameters, so this must not happen.
                throw new AssertionError(e);
            }
        }
    }
/**
 * Looks up the single public no-argument void method of {@code testClass} carrying the given
 * marker annotation ({@code @BeforeClass} or {@code @AfterClass}), including inherited methods.
 *
 * @param testClass the test class to inspect.
 * @param annotation the marker annotation to search for.
 * @return the matching method, or {@code null} if the class declares none.
 * @throws IllegalArgumentException if an annotated method has a wrong signature, or if more
 *         than one correctly-annotated method exists.
 */
@SuppressWarnings("unchecked")
private static Method getSinttestSpecialMethod(Class<? extends AbstractSmackIntTest> testClass, Class<? extends Annotation> annotation) {
    Set<Method> specialClassMethods = getAllMethods(testClass,
                    withAnnotation(annotation), withReturnType(Void.TYPE),
                    withParametersCount(0), withModifier(Modifier.PUBLIC
                    ));
    // See if there are any methods that have a special but a wrong signature
    Set<Method> allSpecialClassMethods = getAllMethods(testClass, withAnnotation(annotation));
    allSpecialClassMethods.removeAll(specialClassMethods);
    if (!allSpecialClassMethods.isEmpty()) {
        throw new IllegalArgumentException(annotation + " methods with wrong signature found");
    }
    if (specialClassMethods.size() == 1) {
        return specialClassMethods.iterator().next();
    }
    else if (specialClassMethods.size() > 1) {
        // Fixed: the message previously always claimed "@BeforeClass", even when this
        // method was resolving @AfterClass methods.
        throw new IllegalArgumentException("Only one " + annotation + " method allowed");
    }
    return null;
}
/**
 * A single executable test instance: the reflective test method, how to execute it, and
 * descriptive metadata used when reporting results.
 */
static final class ConcreteTest {
    private final TestType testType;
    private final Method method;
    private final Executor executor;
    private final String[] subdescriptions;
    private ConcreteTest(TestType testType, Method method, Executor executor, String... subdescriptions) {
        this.testType = testType;
        this.method = method;
        this.executor = executor;
        this.subdescriptions = subdescriptions;
    }
    // Lazily computed description, see toString().
    private transient String stringCache;
    @Override
    public String toString() {
        if (stringCache == null) {
            // Build "<Class>.<method> (<type>[, sub, descriptions])" once and cache it.
            StringBuilder description = new StringBuilder();
            description.append(method.getDeclaringClass().getSimpleName())
                .append('.')
                .append(method.getName())
                .append(" (")
                .append(testType.name());
            if (subdescriptions != null && subdescriptions.length > 0) {
                description.append(", ");
                StringUtils.appendTo(Arrays.asList(subdescriptions), description);
            }
            description.append(')');
            stringCache = description.toString();
        }
        return stringCache;
    }
    private interface Executor {
        /**
         * Execute the test.
         *
         * @throws IllegalAccessException
         * @throws InterruptedException if the calling thread was interrupted.
         * @throws InvocationTargetException if the reflective invoked test throws an exception.
         * @throws XMPPException in case an XMPPException happens when <em>preparing</em> the test.
         * @throws IOException in case an IOException happens when <em>preparing</em> the test.
         * @throws SmackException in case an SmackException happens when <em>preparing</em> the test.
         */
        void execute() throws IllegalAccessException, InterruptedException, InvocationTargetException,
                        XMPPException, IOException, SmackException;
    }
}
/**
 * Records a test class that was skipped for this run, together with the human-readable reason.
 */
public static final class DisabledTestClass {
    private final Class<? extends AbstractSmackIntTest> testClass;
    private final String reason;
    private DisabledTestClass(Class<? extends AbstractSmackIntTest> testClass, String reason) {
        this.testClass = testClass;
        this.reason = reason;
    }
    /**
     * Returns the skipped test class.
     *
     * @return the skipped test class.
     */
    public Class<? extends AbstractSmackIntTest> getTestClass() {
        return testClass;
    }
    /**
     * Returns why the class was skipped.
     *
     * @return the reason.
     */
    public String getReason() {
        return reason;
    }
    /**
     * Appends a "Disabled &lt;class&gt; because &lt;reason&gt;" description to the given builder.
     *
     * @param sb the builder to append to.
     * @return the same builder, for chaining.
     */
    public StringBuilder appendTo(StringBuilder sb) {
        sb.append("Disabled ").append(testClass).append(" because ").append(reason);
        return sb;
    }
}
/**
 * Records a single test method that was skipped for this run, together with the
 * human-readable reason.
 */
public static final class DisabledTest {
    private final Method method;
    private final String reason;
    private DisabledTest(Method method, String reason) {
        this.method = method;
        this.reason = reason;
    }
    /**
     * Returns the skipped test method.
     *
     * @return the skipped test method.
     */
    public Method getMethod() {
        return method;
    }
    /**
     * Returns why the method was skipped.
     *
     * @return the reason.
     */
    public String getReason() {
        return reason;
    }
    /**
     * Appends a "Disabled &lt;method&gt; because &lt;reason&gt;" description to the given builder.
     *
     * @param sb the builder to append to.
     * @return the same builder, for chaining.
     */
    public StringBuilder appendTo(StringBuilder sb) {
        sb.append("Disabled ").append(method).append(" because ").append(reason);
        return sb;
    }
}
/**
 * Wraps a low-level test method together with pre-computed facts about its signature: the
 * {@link SmackIntegrationTest} annotation and whether the method takes a single
 * {@code List} of connections (as opposed to one connection per parameter).
 */
private final class LowLevelTestMethod {
    private final Method testMethod;
    private final SmackIntegrationTest smackIntegrationTestAnnotation;
    // True if the method takes one List-of-connections parameter, false if it takes
    // one connection per parameter.
    private final boolean parameterListOfConnections;
    private LowLevelTestMethod(Method testMethod) {
        this.testMethod = testMethod;
        smackIntegrationTestAnnotation = testMethod.getAnnotation(SmackIntegrationTest.class);
        assert smackIntegrationTestAnnotation != null;
        parameterListOfConnections = testMethodParametersIsListOfConnections(testMethod);
    }
    // TODO: The second parameter should probably be a connection descriptor?
    /**
     * Constructs the required number of connected connections of the given class and invokes
     * the wrapped test method with them.
     */
    private void invoke(AbstractSmackLowLevelIntegrationTest test,
                    Class<? extends AbstractXMPPConnection> connectionClass)
                    throws IllegalAccessException, IllegalArgumentException, InvocationTargetException,
                    InterruptedException, SmackException, IOException, XMPPException {
        final int connectionCount;
        if (parameterListOfConnections) {
            // The connection count comes from the annotation, since the single List
            // parameter does not encode it.
            connectionCount = smackIntegrationTestAnnotation.connectionCount();
            if (connectionCount < 1) {
                // Fixed stray quote in the message: was "connection ('" + count + ')'.
                throw new IllegalArgumentException(testMethod + " is annotated to use less than one connection ("
                                + connectionCount + ')');
            }
        } else {
            // One connection per declared parameter.
            connectionCount = testMethod.getParameterCount();
        }
        List<? extends AbstractXMPPConnection> connections = connectionManager.constructConnectedConnections(
                        connectionClass, connectionCount);
        if (parameterListOfConnections) {
            testMethod.invoke(test, connections);
        } else {
            // Spread the connections over the method's parameters.
            Object[] connectionsArray = new Object[connectionCount];
            for (int i = 0; i < connectionsArray.length; i++) {
                connectionsArray[i] = connections.remove(0);
            }
            testMethod.invoke(test, connectionsArray);
        }
        // NOTE(review): in the one-connection-per-parameter branch the list was emptied above,
        // so recycle() receives an empty list and those connections are not recycled here —
        // confirm whether ownership is intentionally transferred to the test.
        connectionManager.recycle(connections);
    }
    @Override
    public String toString() {
        return testMethod.toString();
    }
}
/**
 * Tests whether the sole parameter of the given test method is a collection of
 * {@link AbstractXMPPConnection}s (the most general connection type).
 *
 * @param testMethod the method to inspect.
 * @return {@code true} if the method takes a single collection of connections.
 */
private static boolean testMethodParametersIsListOfConnections(Method testMethod) {
    return testMethodParametersIsListOfConnections(testMethod, AbstractXMPPConnection.class);
}
/**
 * Tests whether the given method's sole parameter is a {@link Collection} whose element type
 * accepts instances of {@code connectionClass}, e.g. {@code List<XMPPTCPConnection>}.
 *
 * @param testMethod the method to inspect.
 * @param connectionClass the connection type that would be put into the collection.
 * @return {@code true} if the method takes exactly one suitable collection parameter.
 */
static boolean testMethodParametersIsListOfConnections(Method testMethod, Class<? extends AbstractXMPPConnection> connectionClass) {
    Type[] parameterTypes = testMethod.getGenericParameterTypes();
    if (parameterTypes.length != 1) {
        return false;
    }
    Class<?> soleParameter = testMethod.getParameterTypes()[0];
    if (!Collection.class.isAssignableFrom(soleParameter)) {
        return false;
    }
    // Robustness fix: a raw (non-generic) Collection parameter is a Class, not a
    // ParameterizedType; the unguarded cast below used to throw ClassCastException.
    if (!(parameterTypes[0] instanceof ParameterizedType)) {
        return false;
    }
    ParameterizedType soleParameterizedType = (ParameterizedType) parameterTypes[0];
    Type[] actualTypeArguments = soleParameterizedType.getActualTypeArguments();
    if (actualTypeArguments.length != 1) {
        return false;
    }
    Type soleActualTypeArgument = actualTypeArguments[0];
    if (!(soleActualTypeArgument instanceof Class<?>)) {
        return false;
    }
    Class<?> soleActualTypeArgumentAsClass = (Class<?>) soleActualTypeArgument;
    if (!connectionClass.isAssignableFrom(soleActualTypeArgumentAsClass)) {
        return false;
    }
    return true;
}
/**
 * Tests whether every parameter of the given method can accept an instance of
 * {@code connectionClass}, i.e. the method is usable as a "one connection per parameter"
 * low-level test. A method without parameters trivially qualifies.
 *
 * @param testMethod the method to inspect.
 * @param connectionClass the connection type that would be passed in.
 * @return {@code true} if every parameter accepts the connection type.
 */
static boolean testMethodParametersVarargsConnections(Method testMethod, Class<? extends AbstractXMPPConnection> connectionClass) {
    for (Class<?> declaredParameterType : testMethod.getParameterTypes()) {
        if (!declaredParameterType.isAssignableFrom(connectionClass)) {
            return false;
        }
    }
    return true;
}
}
| smack-integration-test/src/main/java/org/igniterealtime/smack/inttest/SmackIntegrationTestFramework.java | /**
*
* Copyright 2015-2020 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.igniterealtime.smack.inttest;
import static org.reflections.ReflectionUtils.getAllMethods;
import static org.reflections.ReflectionUtils.withAnnotation;
import static org.reflections.ReflectionUtils.withModifier;
import static org.reflections.ReflectionUtils.withParametersCount;
import static org.reflections.ReflectionUtils.withReturnType;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jivesoftware.smack.AbstractXMPPConnection;
import org.jivesoftware.smack.ConnectionConfiguration.SecurityMode;
import org.jivesoftware.smack.SmackConfiguration;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NoResponseException;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.tcp.XMPPTCPConnectionConfiguration;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smack.util.TLSUtils;
import org.jivesoftware.smack.util.dns.dnsjava.DNSJavaResolver;
import org.jivesoftware.smack.util.dns.javax.JavaxResolver;
import org.jivesoftware.smack.util.dns.minidns.MiniDnsResolver;
import org.jivesoftware.smackx.debugger.EnhancedDebuggerWindow;
import org.jivesoftware.smackx.iqregister.AccountManager;
import org.igniterealtime.smack.inttest.Configuration.AccountRegistration;
import org.igniterealtime.smack.inttest.annotations.AfterClass;
import org.igniterealtime.smack.inttest.annotations.BeforeClass;
import org.igniterealtime.smack.inttest.annotations.SmackIntegrationTest;
import org.reflections.Reflections;
import org.reflections.scanners.MethodAnnotationsScanner;
import org.reflections.scanners.MethodParameterScanner;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
public class SmackIntegrationTestFramework {
static {
    // Must run before any connection configuration is built.
    TLSUtils.setDefaultTrustStoreTypeToJksIfRequired();
}
private static final Logger LOGGER = Logger.getLogger(SmackIntegrationTestFramework.class.getName());
// Escape hatch used by the framework's own unit tests: when true, pseudo integration tests
// under org.igniterealtime.smack.inttest.unittest are NOT skipped (see runTests()).
public static boolean SINTTEST_UNIT_TEST = false;
protected final Configuration config;
// Results of the current run; (re)created at the start of run().
protected TestRunResult testRunResult;
private SmackIntegrationTestEnvironment environment;
protected XmppConnectionManager connectionManager;
// How a @SmackIntegrationTest method is driven: Normal methods take no arguments,
// LowLevel/SpecificLowLevel methods are handed freshly constructed connections.
public enum TestType {
    Normal,
    LowLevel,
    SpecificLowLevel,
}
/**
 * Runs the Smack integration test framework from the command line: builds the configuration
 * from {@code args}, executes all tests, logs a summary and exits with status 0 (all passed)
 * or 2 (at least one failure).
 *
 * @param args the command line arguments used to build the {@link Configuration}.
 */
public static void main(String[] args) throws IOException, KeyManagementException,
                NoSuchAlgorithmException, SmackException, XMPPException, InterruptedException, InstantiationException,
                IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    Configuration config = Configuration.newConfiguration(args);
    SmackIntegrationTestFramework sinttest = new SmackIntegrationTestFramework(config);
    TestRunResult testRunResult = sinttest.run();
    for (Entry<Class<? extends AbstractSmackIntTest>, Throwable> entry : testRunResult.impossibleTestClasses.entrySet()) {
        LOGGER.info("Could not run " + entry.getKey().getName() + " because: "
                        + entry.getValue().getLocalizedMessage());
    }
    for (TestNotPossible testNotPossible : testRunResult.impossibleIntegrationTests) {
        LOGGER.info("Could not run " + testNotPossible.concreteTest + " because: "
                        + testNotPossible.testNotPossibleException.getMessage());
    }
    for (SuccessfulTest successfulTest : testRunResult.successfulIntegrationTests) {
        LOGGER.info(successfulTest.concreteTest + " ✔");
    }
    final int successfulTests = testRunResult.successfulIntegrationTests.size();
    final int failedTests = testRunResult.failedIntegrationTests.size();
    final int availableTests = testRunResult.getNumberOfAvailableTests();
    LOGGER.info("SmackIntegrationTestFramework[" + testRunResult.testRunId + ']' + " finished: "
                    + successfulTests + '/' + availableTests + " [" + failedTests + " failed]");
    final int exitStatus;
    if (failedTests > 0) {
        // NOTE(review): the "��" below looks like a mojibake'd character from a broken
        // source-encoding conversion — confirm the intended character against upstream.
        LOGGER.warning("�� The following " + failedTests + " tests failed! ��");
        for (FailedTest failedTest : testRunResult.failedIntegrationTests) {
            final Throwable cause = failedTest.failureReason;
            LOGGER.log(Level.SEVERE, failedTest.concreteTest + " failed: " + cause, cause);
        }
        exitStatus = 2;
    } else {
        LOGGER.info("All possible Smack Integration Tests completed successfully. \\o/");
        exitStatus = 0;
    }
    // Keep the JVM alive until the user closes the debugger window, if one was requested.
    switch (config.debugger) {
    case enhanced:
        EnhancedDebuggerWindow.getInstance().waitUntilClosed();
        break;
    default:
        break;
    }
    System.exit(exitStatus);
}
/**
 * Creates a new framework instance for the given configuration. No connections are made
 * until {@link #run()} is called.
 *
 * @param configuration the sinttest run configuration.
 */
public SmackIntegrationTestFramework(Configuration configuration) {
    this.config = configuration;
}
/**
 * Performs a complete integration test run: configures the (global) DNS resolver, applies
 * debug/timeout/registration settings, scans the configured packages for test classes,
 * prepares the test environment and runs all tests, finally tearing the environment down.
 *
 * @return the collected results of this run.
 */
public synchronized TestRunResult run()
                throws KeyManagementException, NoSuchAlgorithmException, SmackException, IOException, XMPPException,
                InterruptedException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    // The DNS resolver is not really a per sinttest run setting. It is not even a per connection setting. Instead
    // it is a global setting, but we treat it like a per sinttest run setting.
    switch (config.dnsResolver) {
    case minidns:
        MiniDnsResolver.setup();
        break;
    case javax:
        JavaxResolver.setup();
        break;
    case dnsjava:
        DNSJavaResolver.setup();
        break;
    }
    testRunResult = new TestRunResult();
    // Create a connection manager *after* we created the testRunId (in testRunResult).
    this.connectionManager = new XmppConnectionManager(this);
    LOGGER.info("SmackIntegrationTestFramework [" + testRunResult.testRunId + ']' + ": Starting\nSmack version: " + SmackConfiguration.getVersion());
    if (config.debugger != Configuration.Debugger.none) {
        // JUL Debugger will not print any information until configured to print log messages of
        // level FINE
        // TODO configure JUL for log?
        SmackConfiguration.addDisabledSmackClass("org.jivesoftware.smack.debugger.JulDebugger");
        SmackConfiguration.DEBUG = true;
    }
    if (config.replyTimeout > 0) {
        SmackConfiguration.setDefaultReplyTimeout(config.replyTimeout);
    }
    if (config.securityMode != SecurityMode.required && config.accountRegistration == AccountRegistration.inBandRegistration) {
        // In-band registration over a possibly unencrypted connection requires an explicit opt-in.
        AccountManager.sensitiveOperationOverInsecureConnectionDefault(true);
    }
    // TODO print effective configuration
    String[] testPackages;
    if (config.testPackages == null || config.testPackages.isEmpty()) {
        testPackages = new String[] { "org.jivesoftware.smackx", "org.jivesoftware.smack" };
    }
    else {
        testPackages = config.testPackages.toArray(new String[config.testPackages.size()]);
    }
    Reflections reflections = new Reflections(testPackages, new SubTypesScanner(),
                    new TypeAnnotationsScanner(), new MethodAnnotationsScanner(), new MethodParameterScanner());
    Set<Class<? extends AbstractSmackIntegrationTest>> inttestClasses = reflections.getSubTypesOf(AbstractSmackIntegrationTest.class);
    Set<Class<? extends AbstractSmackLowLevelIntegrationTest>> lowLevelInttestClasses = reflections.getSubTypesOf(AbstractSmackLowLevelIntegrationTest.class);
    Set<Class<? extends AbstractSmackIntTest>> classes = new HashSet<>(inttestClasses.size()
                    + lowLevelInttestClasses.size());
    classes.addAll(inttestClasses);
    classes.addAll(lowLevelInttestClasses);
    {
        // Remove all abstract classes.
        // TODO: This may be a good candidate for Java stream filtering once Smack is Android API 24 or higher.
        Iterator<Class<? extends AbstractSmackIntTest>> it = classes.iterator();
        while (it.hasNext()) {
            Class<? extends AbstractSmackIntTest> clazz = it.next();
            if (Modifier.isAbstract(clazz.getModifiers())) {
                it.remove();
            }
        }
    }
    if (classes.isEmpty()) {
        throw new IllegalStateException("No test classes found");
    }
    LOGGER.info("SmackIntegrationTestFramework [" + testRunResult.testRunId
                    + "]: Finished scanning for tests, preparing environment");
    environment = prepareEnvironment();
    try {
        runTests(classes);
    }
    catch (Throwable t) {
        // Log the thrown Throwable to prevent it being shadowed in case the finally block below also throws.
        LOGGER.log(Level.SEVERE, "Unexpected abort because runTests() threw throwable", t);
        throw t;
    }
    finally {
        // Ensure that the accounts are deleted and disconnected before we continue
        connectionManager.disconnectAndCleanup();
    }
    return testRunResult;
}
/**
 * Prepares and executes every test of the given classes: filters disabled classes and
 * methods, verifies test method signatures, builds {@link ConcreteTest}s per test type,
 * prints a status summary, then runs all prepared tests.
 *
 * @param classes the non-abstract test classes discovered by the classpath scan.
 */
@SuppressWarnings({"Finally"})
private void runTests(Set<Class<? extends AbstractSmackIntTest>> classes)
                throws InterruptedException, InstantiationException, IllegalAccessException,
                IllegalArgumentException, SmackException, IOException, XMPPException {
    List<PreparedTest> tests = new ArrayList<>(classes.size());
    int numberOfAvailableTests = 0;
    for (Class<? extends AbstractSmackIntTest> testClass : classes) {
        final String testClassName = testClass.getName();
        // TODO: Move the whole "skipping section" below one layer up?
        // Skip pseudo integration tests from src/test
        // Although Smack's gradle build files do not state that the 'main' sources classpath also contains the
        // 'test' classes. Some IDEs like Eclipse include them. As result, a real integration test run encounters
        // pseudo integration tests like the DummySmackIntegrationTest which always throws from src/test.
        // It is unclear why this apparently does not happen in the 4.3 branch, one likely cause is
        // compile project(path: ":smack-omemo", configuration: "testRuntime")
        // in
        // smack-integration-test/build.gradle:17
        // added after 4.3 was branched out with
        // 1f731f6318785a84b9741280d586a61dc37ecb2e
        // Now "gradle integrationTest" appear to be never affected by this, i.e., they are executed with the
        // correct classpath. Plain Eclipse, i.e. Smack imported into Eclipse after "gradle eclipse", appear
        // to include *all* classes. Which means those runs sooner or later try to execute
        // DummySmackIntegrationTest. Eclipse with buildship, the gradle plugin for Eclipse, always excludes
        // *all* src/test classes, which means they do not encounter DummySmackIntegrationTest, but this means
        // that the "compile project(path: ":smack-omemo", configuration: "testRuntime")" is not respected,
        // which leads to
        // Exception in thread "main" java.lang.NoClassDefFoundError: org/jivesoftware/smack/test/util/FileTestUtil
        // at org.jivesoftware.smackx.ox.OXSecretKeyBackupIntegrationTest.<clinit>(OXSecretKeyBackupIntegrationTest.java:66)
        // See
        // - https://github.com/eclipse/buildship/issues/354 (Remove test dependencies from runtime classpath)
        // - https://bugs.eclipse.org/bugs/show_bug.cgi?id=482315 (Runtime classpath includes test dependencies)
        // - https://discuss.gradle.org/t/main-vs-test-compile-vs-runtime-classpaths-in-eclipse-once-and-for-all-how/17403
        // - https://bugs.eclipse.org/bugs/show_bug.cgi?id=376616 (Scope of dependencies has no effect on Eclipse compilation)
        if (!SINTTEST_UNIT_TEST && testClassName.startsWith("org.igniterealtime.smack.inttest.unittest")) {
            LOGGER.warning("Skipping integration test '" + testClassName + "' from src/test classpath (should not be in classpath)");
            continue;
        }
        if (config.enabledTests != null && !isInSet(testClass, config.enabledTests)) {
            DisabledTestClass disabledTestClass = new DisabledTestClass(testClass, "Skipping test class " + testClassName + " because it is not enabled");
            testRunResult.disabledTestClasses.add(disabledTestClass);
            continue;
        }
        if (isInSet(testClass, config.disabledTests)) {
            // Fixed typo in the user-visible reason: "disalbed" -> "disabled".
            DisabledTestClass disabledTestClass = new DisabledTestClass(testClass, "Skipping test class " + testClassName + " because it is disabled");
            testRunResult.disabledTestClasses.add(disabledTestClass);
            continue;
        }
        final Constructor<? extends AbstractSmackIntTest> cons;
        try {
            cons = testClass.getConstructor(SmackIntegrationTestEnvironment.class);
        }
        catch (NoSuchMethodException | SecurityException e) {
            throw new IllegalArgumentException(
                            "Smack Integration Test class does not declare the correct constructor. Is a public Constructor(SmackIntegrationTestEnvironment) missing?",
                            e);
        }
        final List<Method> smackIntegrationTestMethods;
        {
            Method[] testClassMethods = testClass.getMethods();
            smackIntegrationTestMethods = new ArrayList<>(testClassMethods.length);
            for (Method method : testClassMethods) {
                if (!method.isAnnotationPresent(SmackIntegrationTest.class)) {
                    continue;
                }
                smackIntegrationTestMethods.add(method);
            }
        }
        if (smackIntegrationTestMethods.isEmpty()) {
            LOGGER.warning("No Smack integration test methods found in " + testClass);
            continue;
        }
        final AbstractSmackIntTest test;
        try {
            test = cons.newInstance(environment);
        }
        catch (InvocationTargetException e) {
            Throwable cause = e.getCause();
            // Rethrows fatal causes; non-fatal ones mark the whole class as impossible.
            throwFatalException(cause);
            testRunResult.impossibleTestClasses.put(testClass, cause);
            continue;
        }
        Class<? extends AbstractXMPPConnection> specificLowLevelConnectionClass = null;
        final TestType testType;
        if (test instanceof AbstractSmackSpecificLowLevelIntegrationTest) {
            AbstractSmackSpecificLowLevelIntegrationTest<?> specificLowLevelTest = (AbstractSmackSpecificLowLevelIntegrationTest<?>) test;
            specificLowLevelConnectionClass = specificLowLevelTest.getConnectionClass();
            testType = TestType.SpecificLowLevel;
        } else if (test instanceof AbstractSmackLowLevelIntegrationTest) {
            testType = TestType.LowLevel;
        } else if (test instanceof AbstractSmackIntegrationTest) {
            testType = TestType.Normal;
        } else {
            throw new AssertionError();
        }
        // Verify the method signatures, throw in case a signature is incorrect.
        for (Method method : smackIntegrationTestMethods) {
            Class<?> retClass = method.getReturnType();
            if (!retClass.equals(Void.TYPE)) {
                // Fixed missing space between "on" and the method in the message.
                throw new IllegalStateException(
                                "SmackIntegrationTest annotation on " + method + " that does not return void");
            }
            switch (testType) {
            case Normal:
                final Class<?>[] parameterTypes = method.getParameterTypes();
                if (parameterTypes.length > 0) {
                    // Fixed typo "annotaton" and stray trailing space in the message.
                    throw new IllegalStateException(
                                    "SmackIntegrationTest annotation on " + method + " that takes arguments");
                }
                break;
            case LowLevel:
                verifyLowLevelTestMethod(method, AbstractXMPPConnection.class);
                break;
            case SpecificLowLevel:
                verifyLowLevelTestMethod(method, specificLowLevelConnectionClass);
                break;
            }
        }
        // Drop methods that are not enabled, or explicitly disabled, by the configuration.
        Iterator<Method> it = smackIntegrationTestMethods.iterator();
        while (it.hasNext()) {
            final Method method = it.next();
            final String methodName = method.getName();
            if (config.enabledTests != null && !(config.enabledTests.contains(methodName)
                            || isInSet(testClass, config.enabledTests))) {
                DisabledTest disabledTest = new DisabledTest(method, "Skipping test method " + methodName + " because it is not enabled");
                testRunResult.disabledTests.add(disabledTest);
                it.remove();
                continue;
            }
            if (config.disabledTests != null && config.disabledTests.contains(methodName)) {
                DisabledTest disabledTest = new DisabledTest(method, "Skipping test method " + methodName + " because it is disabled");
                testRunResult.disabledTests.add(disabledTest);
                it.remove();
                continue;
            }
        }
        if (smackIntegrationTestMethods.isEmpty()) {
            LOGGER.info("All tests in " + testClassName + " are disabled");
            continue;
        }
        List<ConcreteTest> concreteTests = new ArrayList<>(smackIntegrationTestMethods.size());
        for (Method testMethod : smackIntegrationTestMethods) {
            switch (testType) {
            case Normal: {
                ConcreteTest.Executor concreteTestExecutor = () -> testMethod.invoke(test);
                ConcreteTest concreteTest = new ConcreteTest(testType, testMethod, concreteTestExecutor);
                concreteTests.add(concreteTest);
            }
                break;
            case LowLevel:
            case SpecificLowLevel:
                LowLevelTestMethod lowLevelTestMethod = new LowLevelTestMethod(testMethod);
                switch (testType) {
                case LowLevel:
                    // One concrete test per enabled connection type.
                    List<ConcreteTest> concreteLowLevelTests = invokeLowLevel(lowLevelTestMethod, (AbstractSmackLowLevelIntegrationTest) test);
                    concreteTests.addAll(concreteLowLevelTests);
                    break;
                case SpecificLowLevel: {
                    ConcreteTest.Executor concreteTestExecutor = () -> invokeSpecificLowLevel(
                                    lowLevelTestMethod, (AbstractSmackSpecificLowLevelIntegrationTest<?>) test);
                    ConcreteTest concreteTest = new ConcreteTest(testType, testMethod, concreteTestExecutor);
                    concreteTests.add(concreteTest);
                    break;
                }
                default:
                    throw new AssertionError();
                }
                break;
            }
        }
        // Instantiate the prepared test early as this will check the before and after class annotations.
        PreparedTest preparedTest = new PreparedTest(test, concreteTests);
        tests.add(preparedTest);
        numberOfAvailableTests += concreteTests.size();
    }
    // Print status information.
    StringBuilder sb = new StringBuilder(1024);
    sb.append("Smack Integration Test Framework\n");
    sb.append("################################\n");
    if (config.verbose) {
        sb.append('\n');
        if (!testRunResult.disabledTestClasses.isEmpty()) {
            sb.append("The following test classes are disabled:\n");
            for (DisabledTestClass disabledTestClass : testRunResult.disabledTestClasses) {
                disabledTestClass.appendTo(sb).append('\n');
            }
        }
        if (!testRunResult.disabledTests.isEmpty()) {
            sb.append("The following tests are disabled:\n");
            for (DisabledTest disabledTest : testRunResult.disabledTests) {
                disabledTest.appendTo(sb).append('\n');
            }
        }
        sb.append('\n');
    }
    sb.append("Available tests: ").append(numberOfAvailableTests)
        .append("(#-classes: ").append(testRunResult.disabledTestClasses.size())
        .append(", #-tests: ").append(testRunResult.disabledTests.size())
        .append(")\n");
    LOGGER.info(sb.toString());
    for (PreparedTest test : tests) {
        test.run();
    }
    // Assert that all tests in the 'tests' list produced a result.
    assert numberOfAvailableTests == testRunResult.getNumberOfAvailableTests();
}
/**
 * Executes a single concrete test, measures its duration, and files the outcome into
 * {@link TestRunResult} as successful, failed or not possible.
 *
 * @param concreteTest the test to execute.
 */
private void runConcreteTest(ConcreteTest concreteTest)
                throws InterruptedException, XMPPException, IOException, SmackException {
    LOGGER.info(concreteTest + " Start");
    long testStart = System.currentTimeMillis();
    try {
        concreteTest.executor.execute();
        long testEnd = System.currentTimeMillis();
        LOGGER.info(concreteTest + " Success");
        testRunResult.successfulIntegrationTests.add(new SuccessfulTest(concreteTest, testStart, testEnd, null));
    }
    catch (InvocationTargetException e) {
        long testEnd = System.currentTimeMillis();
        Throwable cause = e.getCause();
        if (cause instanceof TestNotPossibleException) {
            // The test signalled that the environment lacks a required feature.
            LOGGER.info(concreteTest + " is not possible");
            testRunResult.impossibleIntegrationTests.add(new TestNotPossible(concreteTest, testStart, testEnd,
                            null, (TestNotPossibleException) cause));
            return;
        }
        Throwable nonFatalFailureReason;
        // junit assert's throw an AssertionError if they fail, those should not be
        // thrown up, as it would be done by throwFatalException()
        if (cause instanceof AssertionError) {
            nonFatalFailureReason = cause;
        } else {
            nonFatalFailureReason = throwFatalException(cause);
        }
        // An integration test failed
        testRunResult.failedIntegrationTests.add(new FailedTest(concreteTest, testStart, testEnd, null,
                        nonFatalFailureReason));
        LOGGER.log(Level.SEVERE, concreteTest + " Failed", e);
    }
    catch (IllegalArgumentException | IllegalAccessException e) {
        // Framework bug: method signatures were verified before the run started.
        throw new AssertionError(e);
    }
}
/**
 * Verifies that a low-level test method either takes a single collection of connections or
 * one connection per parameter.
 *
 * @param method the annotated test method.
 * @param connectionClass the connection type the method must accept.
 * @throws IllegalArgumentException if the method matches neither accepted signature.
 */
private static void verifyLowLevelTestMethod(Method method,
                Class<? extends AbstractXMPPConnection> connectionClass) {
    if (testMethodParametersIsListOfConnections(method, connectionClass)) {
        return;
    }
    if (testMethodParametersVarargsConnections(method, connectionClass)) {
        return;
    }
    throw new IllegalArgumentException(method + " is not a valid low level test method");
}
/**
 * Builds one {@link ConcreteTest} per enabled connection type for the given low-level test
 * method. Connection types excluded by the configuration are recorded as disabled tests.
 *
 * @param lowLevelTestMethod the wrapped low-level test method.
 * @param test the test class instance the method belongs to.
 * @return the concrete tests to run, one per enabled connection type.
 */
private List<ConcreteTest> invokeLowLevel(LowLevelTestMethod lowLevelTestMethod, AbstractSmackLowLevelIntegrationTest test) {
    Collection<? extends XmppConnectionDescriptor<?, ?, ?>> connectionDescriptors;
    if (lowLevelTestMethod.smackIntegrationTestAnnotation.onlyDefaultConnectionType()) {
        XmppConnectionDescriptor<?, ?, ?> defaultConnectionDescriptor = connectionManager.getDefaultConnectionDescriptor();
        connectionDescriptors = Collections.singleton(defaultConnectionDescriptor);
    } else {
        connectionDescriptors = connectionManager.getConnectionDescriptors();
    }
    List<ConcreteTest> resultingConcreteTests = new ArrayList<>(connectionDescriptors.size());
    for (XmppConnectionDescriptor<?, ?, ?> connectionDescriptor : connectionDescriptors) {
        String connectionNick = connectionDescriptor.getNickname();
        if (config.enabledConnections != null && !config.enabledConnections.contains(connectionNick)) {
            DisabledTest disabledTest = new DisabledTest(lowLevelTestMethod.testMethod, "Not creating test for " + lowLevelTestMethod + " with connection '" + connectionNick
                            + "', as this connection type is not enabled");
            testRunResult.disabledTests.add(disabledTest);
            continue;
        }
        if (config.disabledConnections != null && config.disabledConnections.contains(connectionNick)) {
            // Fixed: the reason message was missing the closing quote after the nickname
            // (inconsistent with the "not enabled" message above).
            DisabledTest disabledTest = new DisabledTest(lowLevelTestMethod.testMethod, "Not creating test for " + lowLevelTestMethod + " with connection '" + connectionNick
                            + "', as this connection type is disabled");
            testRunResult.disabledTests.add(disabledTest);
            continue;
        }
        Class<? extends AbstractXMPPConnection> connectionClass = connectionDescriptor.getConnectionClass();
        ConcreteTest.Executor executor = () -> lowLevelTestMethod.invoke(test, connectionClass);
        ConcreteTest concreteTest = new ConcreteTest(TestType.LowLevel, lowLevelTestMethod.testMethod, executor, connectionClass.getSimpleName());
        resultingConcreteTests.add(concreteTest);
    }
    return resultingConcreteTests;
}
/**
 * Invokes a specific-low-level test method with the connection class fixed by the test class
 * itself. Such tests must not additionally request only the default connection type via the
 * annotation, since their connection type is already specific.
 */
private static <C extends AbstractXMPPConnection> void invokeSpecificLowLevel(LowLevelTestMethod testMethod,
                AbstractSmackSpecificLowLevelIntegrationTest<C> test)
                throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, InterruptedException,
                SmackException, IOException, XMPPException {
    if (testMethod.smackIntegrationTestAnnotation.onlyDefaultConnectionType()) {
        throw new IllegalArgumentException("SpecificLowLevelTests must not have set onlyDefaultConnectionType");
    }
    Class<C> connectionClass = test.getConnectionClass();
    testMethod.invoke(test, connectionClass);
}
/**
 * Prepares the test environment by delegating to the connection manager.
 *
 * @return the prepared environment all tests run against.
 */
protected SmackIntegrationTestEnvironment prepareEnvironment() throws SmackException,
                IOException, XMPPException, InterruptedException, KeyManagementException,
                NoSuchAlgorithmException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    return connectionManager.prepareEnvironment();
}
// Identifies one of the framework's numbered test accounts.
enum AccountNum {
    One,
    Two,
    Three,
}
/**
 * Creates an XMPP-over-TCP connection configuration builder with all settings from the given
 * sinttest configuration already applied.
 *
 * @param config the sinttest run configuration.
 * @return a pre-configured connection configuration builder.
 */
static XMPPTCPConnectionConfiguration.Builder getConnectionConfigurationBuilder(Configuration config) {
    XMPPTCPConnectionConfiguration.Builder builder = XMPPTCPConnectionConfiguration.builder();
    config.configurationApplier.applyConfigurationTo(builder);
    return builder;
}
private static Exception throwFatalException(Throwable e) throws Error, NoResponseException,
InterruptedException {
if (e instanceof InterruptedException) {
throw (InterruptedException) e;
}
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
if (e instanceof Error) {
throw (Error) e;
}
return (Exception) e;
}
/**
 * Tests whether the given class is named in the given set, either by its fully qualified
 * name or by its simple (unqualified) name.
 *
 * @param clz the class to look up.
 * @param classes the set of class names, may be {@code null}.
 * @return {@code true} if the set contains the class under either name.
 */
private static boolean isInSet(Class<?> clz, Set<String> classes) {
    if (classes == null) {
        return false;
    }
    return classes.contains(clz.getName()) || classes.contains(clz.getSimpleName());
}
/**
 * The aggregated results of one integration test run. The result lists are synchronized
 * wrappers; the getters expose unmodifiable views.
 */
public static final class TestRunResult {
    /**
     * A short String of lowercase characters and numbers used to identify a integration test
     * run. We use lowercase characters because this string will eventually be part of the
     * localpart of the used JIDs (and the localpart is case insensitive).
     */
    public final String testRunId = StringUtils.insecureRandomString(5).toLowerCase(Locale.US);
    private final List<SuccessfulTest> successfulIntegrationTests = Collections.synchronizedList(new LinkedList<SuccessfulTest>());
    private final List<FailedTest> failedIntegrationTests = Collections.synchronizedList(new LinkedList<FailedTest>());
    private final List<TestNotPossible> impossibleIntegrationTests = Collections.synchronizedList(new LinkedList<TestNotPossible>());
    // TODO: Ideally there would only be a list of disabledTests, but since we do not process a disabled test class
    // any further, we can not determine the concrete disabled tests.
    private final List<DisabledTestClass> disabledTestClasses = Collections.synchronizedList(new ArrayList<>());
    private final List<DisabledTest> disabledTests = Collections.synchronizedList(new ArrayList<>());
    private final Map<Class<? extends AbstractSmackIntTest>, Throwable> impossibleTestClasses = new HashMap<>();
    TestRunResult() {
    }
    /**
     * Returns the identifier of this test run.
     *
     * @return the test run identifier.
     */
    public String getTestRunId() {
        return testRunId;
    }
    /**
     * Returns how many tests produced a result in this run (successful, failed or not possible).
     *
     * @return the number of tests that produced a result.
     */
    public int getNumberOfAvailableTests() {
        return successfulIntegrationTests.size() + failedIntegrationTests.size() + impossibleIntegrationTests.size();
    }
    /**
     * Returns an unmodifiable view of the successful tests.
     *
     * @return the successful tests.
     */
    public List<SuccessfulTest> getSuccessfulTests() {
        return Collections.unmodifiableList(successfulIntegrationTests);
    }
    /**
     * Returns an unmodifiable view of the failed tests.
     *
     * @return the failed tests.
     */
    public List<FailedTest> getFailedTests() {
        return Collections.unmodifiableList(failedIntegrationTests);
    }
    /**
     * Returns an unmodifiable view of the tests that were not possible in this environment.
     *
     * @return the not-possible tests.
     */
    public List<TestNotPossible> getNotPossibleTests() {
        return Collections.unmodifiableList(impossibleIntegrationTests);
    }
    /**
     * Returns an unmodifiable view of the test classes that could not be instantiated,
     * mapped to the throwable that prevented instantiation.
     *
     * @return the impossible test classes and their causes.
     */
    public Map<Class<? extends AbstractSmackIntTest>, Throwable> getImpossibleTestClasses() {
        return Collections.unmodifiableMap(impossibleTestClasses);
    }
}
/**
 * A test class instance bundled with the concrete tests derived from it and its
 * optional lifecycle methods annotated with {@link BeforeClass} / {@link AfterClass}.
 */
final class PreparedTest {
    private final AbstractSmackIntTest test;
    private final List<ConcreteTest> concreteTests;

    // Resolved once at construction; either may be null when the test class does not
    // declare the corresponding lifecycle method.
    private final Method beforeClassMethod;
    private final Method afterClassMethod;

    private PreparedTest(AbstractSmackIntTest test, List<ConcreteTest> concreteTests) {
        this.test = test;
        this.concreteTests = concreteTests;
        Class<? extends AbstractSmackIntTest> testClass = test.getClass();
        beforeClassMethod = getSinttestSpecialMethod(testClass, BeforeClass.class);
        afterClassMethod = getSinttestSpecialMethod(testClass, AfterClass.class);
    }

    /**
     * Runs all concrete tests of this prepared class. The @BeforeClass method (if any)
     * runs first; the @AfterClass method always runs, even if a test throws.
     */
    public void run() throws InterruptedException, XMPPException, IOException, SmackException {
        try {
            // Run the @BeforeClass methods (if any)
            executeSinttestSpecialMethod(beforeClassMethod);

            for (ConcreteTest concreteTest : concreteTests) {
                runConcreteTest(concreteTest);
            }
        }
        finally {
            executeSinttestSpecialMethod(afterClassMethod);
        }
    }

    // Reflectively invokes a parameter-less lifecycle method. Reflection failures are
    // logged and swallowed; an IllegalArgumentException indicates a framework bug and
    // is escalated as an AssertionError. Note the catch order: the specific reflective
    // exceptions must be handled before the broader IllegalArgumentException.
    private void executeSinttestSpecialMethod(Method method) {
        if (method == null) {
            return;
        }

        try {
            method.invoke(test);
        }
        catch (InvocationTargetException | IllegalAccessException e) {
            LOGGER.log(Level.SEVERE, "Exception executing " + method, e);
        }
        catch (IllegalArgumentException e) {
            throw new AssertionError(e);
        }
    }
}
/**
 * Looks up the single public, void, parameter-less method of {@code testClass} that
 * carries the given annotation (e.g. {@link BeforeClass} or {@link AfterClass}).
 *
 * @param testClass the test class to scan (including inherited methods).
 * @param annotation the marker annotation to look for.
 * @return the annotated method, or {@code null} if the class declares none.
 * @throws IllegalArgumentException if an annotated method has a wrong signature, or if
 *         more than one method carries the annotation.
 */
@SuppressWarnings("unchecked")
private static Method getSinttestSpecialMethod(Class<? extends AbstractSmackIntTest> testClass, Class<? extends Annotation> annotation) {
    Set<Method> specialClassMethods = getAllMethods(testClass,
                    withAnnotation(annotation), withReturnType(Void.TYPE),
                    withParametersCount(0), withModifier(Modifier.PUBLIC
                    ));

    // See if there are any methods that have a special but a wrong signature
    Set<Method> allSpecialClassMethods = getAllMethods(testClass, withAnnotation(annotation));
    allSpecialClassMethods.removeAll(specialClassMethods);
    if (!allSpecialClassMethods.isEmpty()) {
        throw new IllegalArgumentException(annotation + " methods with wrong signature found: " + allSpecialClassMethods);
    }

    if (specialClassMethods.size() == 1) {
        return specialClassMethods.iterator().next();
    }
    else if (specialClassMethods.size() > 1) {
        // Bug fix: the message previously always claimed "@BeforeClass", even when this
        // method was invoked for @AfterClass. Use the actual annotation instead.
        throw new IllegalArgumentException("Only one " + annotation + " method allowed");
    }
    return null;
}
/**
 * A single runnable test: the reflective test method, its {@link TestType}, an
 * {@link Executor} that knows how to invoke it, and optional sub-descriptions used
 * to distinguish parameterized variants in the human readable name.
 */
static final class ConcreteTest {
    private final TestType testType;
    private final Method method;
    private final Executor executor;
    private final String[] subDescriptions;

    private ConcreteTest(TestType testType, Method method, Executor executor, String... subdescriptions) {
        this.testType = testType;
        this.method = method;
        this.executor = executor;
        this.subDescriptions = subdescriptions;
    }

    // Lazily computed human readable name; toString() may be called repeatedly.
    private transient String stringCache;

    @Override
    public String toString() {
        String cached = stringCache;
        if (cached != null) {
            return cached;
        }

        // Format: DeclaringClass.methodName (TESTTYPE[, sub1, sub2, ...])
        StringBuilder sb = new StringBuilder(method.getDeclaringClass().getSimpleName())
            .append('.')
            .append(method.getName())
            .append(" (")
            .append(testType.name());
        if (subDescriptions != null && subDescriptions.length > 0) {
            sb.append(", ");
            StringUtils.appendTo(Arrays.asList(subDescriptions), sb);
        }
        sb.append(')');

        cached = sb.toString();
        stringCache = cached;
        return cached;
    }

    private interface Executor {

        /**
         * Execute the test.
         *
         * @throws IllegalAccessException
         * @throws InterruptedException if the calling thread was interrupted.
         * @throws InvocationTargetException if the reflective invoked test throws an exception.
         * @throws XMPPException in case an XMPPException happens when <em>preparing</em> the test.
         * @throws IOException in case an IOException happens when <em>preparing</em> the test.
         * @throws SmackException in case an SmackException happens when <em>preparing</em> the test.
         */
        void execute() throws IllegalAccessException, InterruptedException, InvocationTargetException,
                        XMPPException, IOException, SmackException;
    }
}
/**
 * Records a whole test class that was excluded from the run, together with the
 * human readable reason for its exclusion.
 */
public static final class DisabledTestClass {
    private final Class<? extends AbstractSmackIntTest> testClass;
    private final String reason;

    private DisabledTestClass(Class<? extends AbstractSmackIntTest> testClass, String reason) {
        this.testClass = testClass;
        this.reason = reason;
    }

    /** @return the disabled test class. */
    public Class<? extends AbstractSmackIntTest> getTestClass() {
        return testClass;
    }

    /** @return why the class was disabled. */
    public String getReason() {
        return reason;
    }

    /** Appends a one-line description of this entry to the given builder and returns it. */
    public StringBuilder appendTo(StringBuilder sb) {
        sb.append("Disabled ").append(testClass).append(" because ").append(reason);
        return sb;
    }
}
/**
 * Records a single test method that was excluded from the run, together with the
 * human readable reason for its exclusion.
 */
public static final class DisabledTest {
    private final Method method;
    private final String reason;

    private DisabledTest(Method method, String reason) {
        this.method = method;
        this.reason = reason;
    }

    /** @return the disabled test method. */
    public Method getMethod() {
        return method;
    }

    /** @return why the method was disabled. */
    public String getReason() {
        return reason;
    }

    /** Appends a one-line description of this entry to the given builder and returns it. */
    public StringBuilder appendTo(StringBuilder sb) {
        sb.append("Disabled ").append(method).append(" because ").append(reason);
        return sb;
    }
}
/**
 * Wraps a {@link SmackIntegrationTest} annotated method of a low-level integration
 * test, i.e. a test method that receives freshly constructed XMPP connections as
 * arguments — either as a single List parameter or as one parameter per connection.
 */
private final class LowLevelTestMethod {
    private final Method testMethod;
    private final SmackIntegrationTest smackIntegrationTestAnnotation;
    // True if the method takes a single List-of-connections parameter instead of
    // individual connection parameters.
    private final boolean parameterListOfConnections;

    private LowLevelTestMethod(Method testMethod) {
        this.testMethod = testMethod;
        smackIntegrationTestAnnotation = testMethod.getAnnotation(SmackIntegrationTest.class);
        // Callers only construct this for annotated methods.
        assert smackIntegrationTestAnnotation != null;
        parameterListOfConnections = testMethodParametersIsListOfConnections(testMethod);
    }

    // TODO: The second parameter should probably be a connection descriptor?
    /**
     * Invokes the test method, first constructing the required number of connected
     * connections of the given class. For list-parameter methods the count comes from
     * the annotation's connectionCount(); otherwise one connection per parameter.
     *
     * NOTE(review): the connections are obtained from connectionManager, which
     * presumably recycles/disposes them after the test — confirm ownership there.
     */
    private void invoke(AbstractSmackLowLevelIntegrationTest test,
                    Class<? extends AbstractXMPPConnection> connectionClass)
                    throws IllegalAccessException, IllegalArgumentException, InvocationTargetException,
                    InterruptedException, SmackException, IOException, XMPPException {
        final int connectionCount;
        if (parameterListOfConnections) {
            connectionCount = smackIntegrationTestAnnotation.connectionCount();
            if (connectionCount < 1) {
                throw new IllegalArgumentException(testMethod + " is annotated to use less than one connection ('"
                                + connectionCount + ')');
            }
        } else {
            connectionCount = testMethod.getParameterCount();
        }

        List<? extends AbstractXMPPConnection> connections = connectionManager.constructConnectedConnections(
                        connectionClass, connectionCount);

        if (parameterListOfConnections) {
            testMethod.invoke(test, connections);
        } else {
            // Unpack the list into one argument per method parameter.
            Object[] connectionsArray = new Object[connectionCount];
            for (int i = 0; i < connectionsArray.length; i++) {
                connectionsArray[i] = connections.remove(0);
            }
            testMethod.invoke(test, connectionsArray);
        }
    }

    @Override
    public String toString() {
        return testMethod.toString();
    }
}
/**
 * Convenience overload of
 * {@link #testMethodParametersIsListOfConnections(Method, Class)} that uses
 * {@link AbstractXMPPConnection} as the expected connection type.
 */
private static boolean testMethodParametersIsListOfConnections(Method testMethod) {
    return testMethodParametersIsListOfConnections(testMethod, AbstractXMPPConnection.class);
}
/**
 * Determines whether the test method's sole parameter is a parameterized
 * Collection whose element type is (a subtype of) the given connection class,
 * e.g. {@code List<XMPPTCPConnection>}.
 *
 * @param testMethod the method to inspect.
 * @param connectionClass the required connection (element) type.
 * @return true if the single parameter is a Collection of the connection class.
 */
static boolean testMethodParametersIsListOfConnections(Method testMethod, Class<? extends AbstractXMPPConnection> connectionClass) {
    Type[] parameterTypes = testMethod.getGenericParameterTypes();
    if (parameterTypes.length != 1) {
        return false;
    }

    Class<?> soleParameter = testMethod.getParameterTypes()[0];
    if (!Collection.class.isAssignableFrom(soleParameter)) {
        return false;
    }

    // Bug fix: a raw (non-generic) Collection parameter previously caused a
    // ClassCastException at the cast below instead of a negative result.
    if (!(parameterTypes[0] instanceof ParameterizedType)) {
        return false;
    }
    ParameterizedType soleParameterizedType = (ParameterizedType) parameterTypes[0];
    Type[] actualTypeArguments = soleParameterizedType.getActualTypeArguments();
    if (actualTypeArguments.length != 1) {
        return false;
    }

    // The element type must be a concrete class (not a wildcard/type variable) that
    // is assignable from the connection class.
    Type soleActualTypeArgument = actualTypeArguments[0];
    if (!(soleActualTypeArgument instanceof Class<?>)) {
        return false;
    }

    Class<?> soleActualTypeArgumentAsClass = (Class<?>) soleActualTypeArgument;
    if (!connectionClass.isAssignableFrom(soleActualTypeArgumentAsClass)) {
        return false;
    }

    return true;
}
/**
 * Returns true if every parameter of the given test method can accept an instance
 * of the supplied connection class, i.e. the method can be invoked with one
 * connection per parameter. A method without parameters trivially qualifies.
 */
static boolean testMethodParametersVarargsConnections(Method testMethod, Class<? extends AbstractXMPPConnection> connectionClass) {
    Class<?>[] parameterTypes = testMethod.getParameterTypes();
    for (int i = 0; i < parameterTypes.length; i++) {
        boolean acceptsConnection = parameterTypes[i].isAssignableFrom(connectionClass);
        if (!acceptsConnection) {
            return false;
        }
    }
    return true;
}
}
| [sinttest] Recycle low-level test connections
| smack-integration-test/src/main/java/org/igniterealtime/smack/inttest/SmackIntegrationTestFramework.java | [sinttest] Recycle low-level test connections |
|
Java | apache-2.0 | 9e656d7fcf5e64fc5e6e8e3966b92ea898a0a0d1 | 0 | flyersa/MuninMX | /*
*/
package com.unbelievable.munin;
import com.mongodb.BasicDBObject;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.unbelievable.muninmxcd.p;
import static com.unbelievable.muninmxcd.logger;
import static com.unbelievable.utils.Generic.getUnixtime;
import static com.unbelievable.muninmxcd.logMore;
import static com.unbelievable.utils.Database.dbDeleteMissingPlugins;
import static com.unbelievable.utils.Database.dbUpdateAllPluginsForNode;
import static com.unbelievable.utils.Database.dbUpdatePluginForNode;
import static com.unbelievable.utils.Database.dbUpdateLastContact;
import static com.unbelievable.utils.Generic.isPluginIgnored;
import com.unbelievable.utils.SocketCheck;
import java.util.concurrent.CopyOnWriteArrayList;
/**
*
* @author enricokern
*
*/
public class MuninNode
{
// --- node identity & connectivity ---------------------------------------------
private String str_nodename;                  // logical name of this node
private String str_hostname = "unset";        // munin hostname queried by the job
private int i_port = 4949;                    // munin-node TCP port
private String str_group;                     // munin group of this node
private boolean b_isRunning = false;          // true while the monitoring job runs
private long l_lastFrontendQuery;             // unixtime of the last frontend access
private String str_muninVersion = "";         // version banner reported by munin-node
// Plugins loaded from the node; transient so serializers (gson) skip it.
private transient CopyOnWriteArrayList<MuninPlugin> v_loaded_plugins;
private int i_GraphCount = 0;                 // total graph count (see getGraphCount)
private int i_lastRun = 0;                    // unixtime of the last monitoring run
private Integer node_id = 0;                  // database id of this node
private Integer user_id = 0;                  // database id of the owning user
private int queryInterval = 0;                // seconds between monitoring runs
private int last_plugin_load = 0;             // unixtime of the last plugin download
private boolean is_init = false;              // true once plugins were written to the DB
private transient Socket lastSocket;          // most recent data socket (not serialized)
private String str_via = "unset";             // optional proxy munin-node; "unset" = direct
/**
 * Sets the interval (in seconds) between monitoring runs of this node.
 *
 * @param p_int the interval in seconds
 */
public void setQueryInterval(Integer p_int)
{
    this.queryInterval = p_int;
}

/**
 * @return the interval (in seconds) between monitoring runs
 */
public int getQueryInterval()
{
    return this.queryInterval;
}

/**
 * @return the number of graphs counted for this node
 */
public int getGraphCount()
{
    return this.i_GraphCount;
}
/**
 * @return the hostname this node's monitoring job queries
 */
public String getHostname()
{
    return str_hostname;
}

/**
 * @param str_hostname the hostname to set
 */
public void setHostname(String str_hostname)
{
    this.str_hostname = str_hostname;
}

/**
 * @return the munin-node TCP port (default 4949)
 */
public int getPort()
{
    return i_port;
}

/**
 * @param i_port the munin-node TCP port to set
 */
public void setPort(int i_port)
{
    this.i_port = i_port;
}

/**
 * @return the logical name of this node
 */
public String getNodename()
{
    return str_nodename;
}

/**
 * @param str_nodename the node name to set
 */
public void setNodename(String str_nodename)
{
    this.str_nodename = str_nodename;
}

/**
 * @return the munin group this node belongs to
 */
public String getGroup()
{
    return str_group;
}

/**
 * @param str_group the munin group to set
 */
public void setGroup(String str_group)
{
    this.str_group = str_group;
}

/**
 * @return whether the monitoring job is currently active
 */
public boolean isRunning()
{
    return b_isRunning;
}

/**
 * @param p_isRunning the new running state of the monitoring job
 */
public void setIsRunning(boolean p_isRunning)
{
    b_isRunning = p_isRunning;
}
/**
 * Returns the munin plugins known for this host, lazily fetching them from the
 * munin-node on first access.
 *
 * @return list of plugins
 */
public CopyOnWriteArrayList<MuninPlugin> getPluginList()
{
    CopyOnWriteArrayList<MuninPlugin> plugins = getLoadedPlugins();
    if (plugins != null)
    {
        return plugins;
    }
    // using logger from main class because of gson throwing stackoverflow otherwise
    logger.info("Empty Pluginlist for " + getHostname() + ". Loading from munin-node...");
    loadPlugins();
    return getLoadedPlugins();
}
/**
 * Downloads the plugin list and each plugin's graph configuration from the
 * munin-node (optionally through the "via" proxy node) and stores the result via
 * {@link #setLoadedPlugins}. Also records the munin-node version banner and the
 * time of this load.
 *
 * @return true if the exchange completed without exception (also when the node
 *         sent no banner — only a warning is logged then), false on any error.
 */
public boolean loadPlugins()
{
    setLoadedPlugins(new CopyOnWriteArrayList<MuninPlugin>());
    String l_lastProceeded = "";

    Socket cs = null;
    PrintStream os = null;
    BufferedReader in = null;
    try
    {
        cs = new Socket();
        cs.setKeepAlive(false);
        cs.setSoLinger(true, 0);
        cs.setReuseAddress(false);
        cs.setSoTimeout(30000);

        // Connect through the proxy node ("via") when configured, else directly.
        if(!str_via.equals("unset"))
        {
            cs.connect(new InetSocketAddress(this.getStr_via(), this.getPort()), 30000);
        }
        else
        {
            cs.connect(new InetSocketAddress(this.getHostname(), this.getPort()), 30000);
        }

        // Optionally register the socket with the stale-socket watchdog.
        if(p.getProperty("kill.sockets").equals("true"))
        {
            SocketCheck sc = new SocketCheck(cs, getUnixtime());
            sc.setHostname(this.getHostname());
            com.unbelievable.muninmxcd.v_sockets.add(sc);
        }

        os = new PrintStream(cs.getOutputStream());
        in = new BufferedReader(new InputStreamReader(cs.getInputStream()));

        // The first line is the munin-node banner; null means the node closed the
        // connection immediately.
        String s = in.readLine();
        if(s == null)
        {
            logger.warn("Error loading plugins on " + str_hostname + " ("+this.getNode_id()+"). Check connectivity or munin-node");
            return true;
        }

        // Query and remember the munin-node version.
        os.println("version");
        Thread.sleep(250);
        s = in.readLine();
        String version = s.substring(s.indexOf(":")+1, s.length()).trim();
        this.str_muninVersion = version;

        // Request the plugin list; a "via" node needs the munin hostname as argument.
        if(str_via.equals("unset"))
        {
            os.println("list");
        }
        else
        {
            os.println("list " + str_hostname);
        }
        Thread.sleep(250);
        s = in.readLine();

        // Some nodes only answer "list <hostname>"; retry once in that case.
        if(s.trim().equals("") && str_via.equals("unset"))
        {
            logger.info("Plugin Response Empty on " + this.getHostname() + " trying to load with list $hostname");
            os.println("list " + this.getHostname());
            Thread.sleep(250);
            s = in.readLine();
        }

        String l_tmp;
        StringTokenizer l_st = new StringTokenizer(s, " ");

        MuninPlugin l_mp = new MuninPlugin();

        while(l_st.hasMoreTokens())
        {
            String l_strPlugin = l_st.nextToken();

            if(isPluginIgnored(l_strPlugin.toUpperCase()))
            {
                continue;
            }
            l_mp.setPluginName(l_strPlugin);

            // Fetch this plugin's configuration and parse its graph definitions.
            os.println("config " + l_strPlugin);

            int l_iTmp = 0;
            MuninGraph l_mg = new MuninGraph();
            while ((l_tmp = in.readLine()) != null)
            {
                // "." terminates the config block of a plugin.
                if(l_tmp.startsWith("."))
                {
                    break;
                }

                String l_strName;
                String l_strType;
                String l_strValue;

                // Per-datasource lines look like "<name>.<attribute> <value>"; everything
                // else (graph_* directives, comments, noise) is handled in the else branch.
                if(!l_tmp.contains("graph_") && !l_tmp.trim().equals("") && !l_tmp.contains("host_name") && !l_tmp.contains("multigraph") && !l_tmp.trim().equals("graph no") && !l_tmp.trim().equals("# Bad exit") && !l_tmp.trim().contains("info Currently our peer") && !l_tmp.trim().startsWith("#") && !l_tmp.trim().contains("Bonding interface errors"))
                {
                    l_lastProceeded = l_tmp;
                    l_strName = l_tmp.substring(0,l_tmp.indexOf("."));
                    l_strType = l_tmp.substring(l_tmp.indexOf(".")+1,l_tmp.indexOf(" "));
                    l_strValue = l_tmp.substring(l_tmp.indexOf(" ")+1,l_tmp.length());

                    if(l_strType.equals("label"))
                    {
                        // A new "label" attribute starts the next datasource/graph.
                        l_iTmp++;
                        if(l_iTmp > 1)
                        {
                            l_mp.addGraph(l_mg);
                            l_mg = new MuninGraph();
                        }
                        l_mg.setGraphName(l_strName);
                        l_mg.setGraphLabel(l_strValue);
                    }
                    else if(l_strType.equals("draw"))
                    {
                        l_mg.setGraphDraw(l_strValue);
                    }
                    else if(l_strType.equals("type"))
                    {
                        l_mg.setGraphType(l_strValue);
                    }
                    else if(l_strType.equals("info"))
                    {
                        l_mg.setGraphInfo(l_strValue);
                    }
                }
                else
                {
                    // Plugin-wide graph_* directives.
                    if(l_tmp.contains("graph_title"))
                    {
                        l_mp.setPluginTitle(l_tmp.substring(12,l_tmp.length()));
                    }
                    if(l_tmp.contains("graph_info"))
                    {
                        l_mp.setPluginInfo(l_tmp.substring(11,l_tmp.length()));
                    }
                    if(l_tmp.contains("graph_category"))
                    {
                        l_mp.setPluginCategory(l_tmp.substring(15,l_tmp.length()));
                    }
                    if(l_tmp.contains("graph_vlabel"))
                    {
                        l_mp.setPluginLabel(l_tmp.substring(13,l_tmp.length()));
                    }
                }
            }

            // Flush the last graph and keep the plugin only if it produced graphs.
            l_mp.addGraph(l_mg);
            if(l_mp.getGraphs().size() > 0)
            {
                getLoadedPlugins().add(l_mp);
            }
            l_mp = new MuninPlugin();
        }

        last_plugin_load = getUnixtime();
    } catch (Exception ex) {
        logger.error("Error loading plugins on " + str_hostname + " ("+this.getNode_id()+") : " + ex.getMessage());
        ex.printStackTrace();
        return false;
    }
    finally
    {
        // Bug fix: always release the connection. Previously the socket and both
        // streams leaked whenever an exception was thrown before the close calls.
        if(in != null)
        {
            try { in.close(); } catch (Exception ignored) { }
        }
        if(os != null)
        {
            os.close();
        }
        if(cs != null)
        {
            try { cs.close(); } catch (Exception ignored) { }
        }
    }
    return true;
}
/**
 * Performs one monitoring cycle: refreshes the plugin list when necessary (first
 * run writes all plugins to the database; afterwards at most once per day),
 * connects to the munin-node, fetches current values for all graphs of all
 * plugins over a single connection and queues them for MongoDB insertion.
 *
 * NOTE(review): clientSocket is not closed when an exception is thrown inside the
 * try block — presumably the watchdog registered via v_sockets reaps it; confirm.
 */
public void run() {
    b_isRunning = true;

    if(this.str_via.equals("unset"))
    {
        logger.info(getHostname() + " Monitoring job started");
    }
    else
    {
        logger.info(getHostname() + " (VIA: "+this.str_via+") Monitoring job started");
    }

    int iCurTime = getUnixtime();
    // Plugin lists are refreshed at most once per day (86400 s).
    int iPluginRefreshTime = last_plugin_load + 86400;

    try {
        // update plugins, maybe we have some new :)
        // double try to load plugins if fail
        if(getPluginList().size() > 1)
        {
            if(!is_init)
            {
                // First run for this node: push all known plugins/graphs into the DB.
                logger.info("[Job: " + getHostname() + "] Updating Database");
                // update graphs in database too
                for(MuninPlugin it_pl : getPluginList()) {
                    if(it_pl.getGraphs().size() > 0)
                    {
                        dbUpdatePluginForNode(getNode_id(),it_pl);
                    }
                }
                // delete now missing plugins
                dbDeleteMissingPlugins(getNode_id(),getPluginList());
                logger.info("[Job: " + getHostname() + "] Databaseupdate Done");
                is_init = true;
            }
            else
            {
                // Daily plugin refresh.
                if(iCurTime > iPluginRefreshTime )
                {
                    logger.info("Refreshing Plugins on " + this.getHostname());
                    this.loadPlugins();
                    dbUpdateAllPluginsForNode(this);
                }
            }
        }
        else
        {
            // Plugin list (nearly) empty: retry loading.
            this.loadPlugins();
        }

        // Open the data connection (directly or through the "via" proxy node).
        Socket clientSocket = new Socket();
        clientSocket.setSoTimeout(30000);
        clientSocket.setKeepAlive(false);
        clientSocket.setReuseAddress(false);

        if(this.str_via.equals("unset"))
        {
            clientSocket.connect(new InetSocketAddress(this.getHostname(), this.getPort()),30000);
        }
        else
        {
            clientSocket.connect(new InetSocketAddress(this.getStr_via(), this.getPort()),30000);
        }

        lastSocket = clientSocket;

        // Optionally register the socket with the stale-socket watchdog.
        SocketCheck sc = new SocketCheck(clientSocket,getUnixtime());
        if(p.getProperty("kill.sockets").equals("true"))
        {
            sc.setHostname(this.getHostname());
            com.unbelievable.muninmxcd.v_sockets.add(sc);
        }

        this.i_lastRun = getUnixtime();

        // update graphs for all plugins, reusing the single data connection
        Iterator it = this.getLoadedPlugins().iterator();
        while(it.hasNext())
        {
            MuninPlugin l_mp = (MuninPlugin) it.next();
            if(logMore)
            {
                logger.info(getHostname() + " fetching graphs for " + l_mp.getPluginName().toUpperCase());
            }
            l_mp.updateAllGraps(this.getHostname(), this.getPort(), clientSocket, getQueryInterval());
            // add all graphs to insertion queue for mongodb
            queuePluginFetch(l_mp.returnAllGraphs(), l_mp.getPluginName());
        }
        clientSocket.close();

        if(p.getProperty("kill.sockets").equals("true"))
        {
            com.unbelievable.muninmxcd.v_sockets.remove(sc);
        }
        sc = null;
    } catch (Exception ex) {
        logger.fatal("Error in thread for host: " + getHostname() + " : " + ex.getLocalizedMessage());
        ex.printStackTrace();
    }

    int iRunTime = getUnixtime() - iCurTime;
    dbUpdateLastContact(this.getNode_id());
    logger.info(getHostname() + " Monitoring job stopped - runtime: " + iRunTime);
}
/**
 * Queues the current value of every graph of the given plugin for insertion into
 * MongoDB. Only graphs that are initialized — or that belong to the always-queued
 * if_err* / swap plugins — are queued.
 *
 * @param p_almg          the plugin's graphs with their freshly fetched values
 * @param p_strPluginName the plugin these graphs belong to
 */
private void queuePluginFetch(ArrayList<MuninGraph> p_almg, String p_strPluginName)
{
    for (MuninGraph mg : p_almg)
    {
        // Build the mongo document for this sample.
        BasicDBObject doc = new BasicDBObject();
        doc.put("hostname", getHostname());
        doc.put("plugin", p_strPluginName);
        doc.put("graph", mg.getGraphName());
        doc.put("value", mg.getGraphValue().toString());
        doc.put("recv", mg.getLastGraphTime());
        doc.put("user_id", getUser_id());
        doc.put("nodeid", getNode_id());

        // only queue if plugin is initialized or it is a if_err plugin
        boolean alwaysQueue = p_strPluginName.startsWith("if_err") || p_strPluginName.equals("swap");
        if (!mg.isInit() && !alwaysQueue)
        {
            continue;
        }

        com.unbelievable.muninmxcd.mongo_queue.add(doc);
        mg.setLastQueued(getUnixtime());
        logger.debug("Queued: " + getHostname() + " (" + p_strPluginName + " / " + mg.getGraphName() + ") Value: " + mg.getGraphValue());
        if (logMore)
        {
            logger.info("Queued: " + getHostname() + " (" + p_strPluginName + " / " + mg.getGraphName() + ") Value: " + mg.getGraphValue());
        }
    }
}
/**
 * @return the plugins currently loaded for this node (may be null before the first load)
 */
public CopyOnWriteArrayList<MuninPlugin> getLoadedPlugins() {
    return v_loaded_plugins;
}

/**
 * @param v_loaded_plugins the plugin list to set
 */
public void setLoadedPlugins(CopyOnWriteArrayList<MuninPlugin> v_loaded_plugins) {
    this.v_loaded_plugins = v_loaded_plugins;
}

/**
 * @return unixtime of the last frontend query against this node
 */
public long getLastFrontendQuery() {
    return l_lastFrontendQuery;
}

/**
 * @param l_lastFrontendQuery the frontend-query timestamp to set
 */
public void setLastFrontendQuery(long l_lastFrontendQuery) {
    this.l_lastFrontendQuery = l_lastFrontendQuery;
}

/**
 * Sets lastFrontendQuery to the current unixtime.
 */
public void setLastFrontendQuery()
{
    l_lastFrontendQuery = System.currentTimeMillis() / 1000L;
}
/**
 * Resets the current, last and counter values of every loaded graph to "0".
 * (Currently not referenced from within this class.)
 */
private void cleanUpGraphs()
{
    for (MuninPlugin plugin : getLoadedPlugins())
    {
        for (Iterator graphIt = plugin.getGraphs().iterator(); graphIt.hasNext();)
        {
            MuninGraph graph = (MuninGraph) graphIt.next();
            graph.setGraphValue("0");
            graph.setLastGraphValue("0");
            graph.setLastGraphValueCounter("0");
        }
    }
}
/**
 * @return the database id of this node
 */
public Integer getNode_id() {
    return node_id;
}

/**
 * @param node_id the database node id to set
 */
public void setNode_id(Integer node_id) {
    this.node_id = node_id;
}

/**
 * @return the database id of the user owning this node
 */
public Integer getUser_id() {
    return user_id;
}

/**
 * @param user_id the owning user's database id to set
 */
public void setUser_id(Integer user_id) {
    this.user_id = user_id;
}

/**
 * @return the socket used by the most recent monitoring run (may already be closed)
 */
public Socket getLastSocket() {
    return lastSocket;
}

/**
 * @return the proxy munin-node hostname, or "unset" for a direct connection
 */
public String getStr_via() {
    return str_via;
}

/**
 * @param str_via the proxy host to set ("unset" disables proxying)
 */
public void setStr_via(String str_via) {
    this.str_via = str_via;
}
}
| src/com/unbelievable/munin/MuninNode.java | /*
*/
package com.unbelievable.munin;
import com.mongodb.BasicDBObject;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.unbelievable.muninmxcd.p;
import static com.unbelievable.muninmxcd.logger;
import static com.unbelievable.utils.Generic.getUnixtime;
import static com.unbelievable.muninmxcd.logMore;
import static com.unbelievable.utils.Database.dbDeleteMissingPlugins;
import static com.unbelievable.utils.Database.dbUpdateAllPluginsForNode;
import static com.unbelievable.utils.Database.dbUpdatePluginForNode;
import static com.unbelievable.utils.Database.dbUpdateLastContact;
import static com.unbelievable.utils.Generic.isPluginIgnored;
import com.unbelievable.utils.SocketCheck;
import java.util.concurrent.CopyOnWriteArrayList;
/**
*
* @author enricokern
*
*/
public class MuninNode
{
private String str_nodename;
private String str_hostname = "unset";
private int i_port = 4949;
private String str_group;
private boolean b_isRunning = false;
private long l_lastFrontendQuery;
private String str_muninVersion = "";
private transient CopyOnWriteArrayList<MuninPlugin> v_loaded_plugins;
private int i_GraphCount = 0;
private int i_lastRun = 0;
private Integer node_id = 0;
private Integer user_id = 0;
private int queryInterval = 0;
private int last_plugin_load = 0;
private boolean is_init = false;
private transient Socket lastSocket;
private String str_via = "unset";
public void setQueryInterval(Integer p_int)
{
queryInterval = p_int;
}
public int getQueryInterval()
{
return queryInterval;
}
public int getGraphCount()
{
return i_GraphCount;
}
/**
* @return the str_hostname
*/
public String getHostname()
{
return str_hostname;
}
/**
* @param str_hostname the str_hostname to set
*/
public void setHostname(String str_hostname)
{
this.str_hostname = str_hostname;
}
/**
* @return the i_port
*/
public int getPort()
{
return i_port;
}
/**
* @param i_port the i_port to set
*/
public void setPort(int i_port)
{
this.i_port = i_port;
}
/**
* @return the str_nodename
*/
public String getNodename()
{
return str_nodename;
}
/**
* @param str_nodename the str_nodename to set
*/
public void setNodename(String str_nodename)
{
this.str_nodename = str_nodename;
}
/**
* @return the str_group
*/
public String getGroup()
{
return str_group;
}
/**
* @param str_group the str_group to set
*/
public void setGroup(String str_group)
{
this.str_group = str_group;
}
/**
*
* @return if the Thread is active or not
*/
public boolean isRunning()
{
return b_isRunning;
}
public void setIsRunning(boolean p_isRunning)
{
b_isRunning = p_isRunning;
}
/**
* will retrieve a list of loaded munin plugins for this host. If the node was never
* contacted before the list is downloaded from munin-node
*
* @return list of plugins
*/
public CopyOnWriteArrayList<MuninPlugin> getPluginList()
{
if(getLoadedPlugins() == null)
{
// using logger from main class because of gson throwing stackoverflow otherwise
logger.info("Empty Pluginlist for " + this.getHostname() + ". Loading from munin-node...");
loadPlugins();
}
return getLoadedPlugins();
}
/**
* Will load the plugin list from munin-node
*/
public boolean loadPlugins()
{
setLoadedPlugins(new CopyOnWriteArrayList<MuninPlugin>());
String l_lastProceeded = "";
try
{
Socket cs = new Socket();
cs.setKeepAlive(false);
cs.setSoLinger(true, 0);
cs.setReuseAddress(false);
cs.setSoTimeout(30000);
if(!str_via.equals("unset"))
{
cs.connect(new InetSocketAddress(this.getStr_via(), this.getPort()),30000);
}
else
{
cs.connect(new InetSocketAddress(this.getHostname(), this.getPort()),30000);
}
if(p.getProperty("kill.sockets").equals("true"))
{
SocketCheck sc = new SocketCheck(cs,getUnixtime());
sc.setHostname(this.getHostname());
com.unbelievable.muninmxcd.v_sockets.add(sc);
}
PrintStream os = new PrintStream( cs.getOutputStream() );
BufferedReader in = new BufferedReader(new InputStreamReader( cs.getInputStream()) );
String s = in.readLine();
if(s != null)
{
// Set version
os.println("version");
Thread.sleep(250);
s = in.readLine();
String version = s.substring(s.indexOf(":")+1,s.length()).trim();
this.str_muninVersion = version;
// get list of available plugins
if(str_via.equals("unset"))
{
os.println("list");
}
else
{
os.println("list " + str_hostname);
}
Thread.sleep(250);
s = in.readLine();
String l_tmp;
StringTokenizer l_st = new StringTokenizer(s, " ");
// create plugin
MuninPlugin l_mp = new MuninPlugin();
while(l_st.hasMoreTokens())
{
String l_strPlugin = l_st.nextToken();
if(isPluginIgnored(l_strPlugin.toUpperCase()))
{
continue;
}
l_mp.setPluginName(l_strPlugin);
os.println("config " + l_strPlugin);
// create graphs for plugin
int l_iGraphsFound = 0;
int l_iTmp = 0;
MuninGraph l_mg = new MuninGraph();
while ((l_tmp = in.readLine()) != null)
{
if(l_tmp.startsWith("."))
{
break;
}
// collect graphs only for plugin
String l_strName;
String l_strType;
String l_strValue;
if(!l_tmp.contains("graph_") && !l_tmp.trim().equals("") && !l_tmp.contains("host_name") && !l_tmp.contains("multigraph") && !l_tmp.trim().equals("graph no") && !l_tmp.trim().equals("# Bad exit") && !l_tmp.trim().contains("info Currently our peer") && !l_tmp.trim().startsWith("#") && !l_tmp.trim().contains("Bonding interface errors"))
{
l_lastProceeded = l_tmp;
l_strName = l_tmp.substring(0,l_tmp.indexOf("."));
l_strType = l_tmp.substring(l_tmp.indexOf(".")+1,l_tmp.indexOf(" "));
l_strValue = l_tmp.substring(l_tmp.indexOf(" ")+1,l_tmp.length());
//System.err.println("Name: " + l_strName + " Type: " + l_strType + " Value: " + l_strValue);
if(l_strType.equals("label"))
{
l_iTmp++;
if(l_iTmp > 1)
{
l_mp.addGraph(l_mg);
l_mg = new MuninGraph();
}
l_mg.setGraphName(l_strName);
l_mg.setGraphLabel(l_strValue);
}
else if(l_strType.equals("draw"))
{
l_mg.setGraphDraw(l_strValue);
}
else if(l_strType.equals("type"))
{
l_mg.setGraphType(l_strValue);
}
else if(l_strType.equals("info"))
{
l_mg.setGraphInfo(l_strValue);
}
//System.out.println(l_strName);
//System.out.println(l_strType);
//System.out.println(l_strValue);
}
else
{
// set plugin title
if(l_tmp.contains("graph_title"))
{
l_mp.setPluginTitle(l_tmp.substring(12,l_tmp.length()));
}
// set plugin info, if any
if(l_tmp.contains("graph_info"))
{
l_mp.setPluginInfo(l_tmp.substring(11,l_tmp.length()));
}
// set graph category
if(l_tmp.contains("graph_category"))
{
l_mp.setPluginCategory(l_tmp.substring(15,l_tmp.length()));
}
// set graph vlabel
if(l_tmp.contains("graph_vlabel"))
{
l_mp.setPluginLabel(l_tmp.substring(13,l_tmp.length()));
}
}
}
// add to pluginlist
l_mp.addGraph(l_mg);
// add plugin if it got valid graphs
if(l_mp.getGraphs().size() > 0)
{
getLoadedPlugins().add(l_mp);
}
l_mp = null;
l_mp = new MuninPlugin();
//String l_strGraphTitle = s.substring(s.indexOf("graph_title") + 11,s.length());
//System.out.println(" - " + l_strGraphTitle);
}
cs.close();
in.close();
os.close();
last_plugin_load = getUnixtime();
//System.out.println(s);
}
else
{
cs.close();
in.close();
os.close();
logger.warn("Error loading plugins on " + str_hostname + " ("+this.getNode_id()+"). Check connectivity or munin-node");
}
/*
for (MuninPlugin l_mn : getLoadedPlugins()) {
i_GraphCount = i_GraphCount + l_mn.getGraphs().size();
logger.debug(l_mn.getGraphs().size() + " graphs found for plugin: " + l_mn.getPluginName().toUpperCase() + " on node: " + this.getNodename());
}*/
} catch (Exception ex) {
logger.error("Error loading plugins on " + str_hostname + " ("+this.getNode_id()+") : " + ex.getMessage());
ex.printStackTrace();
return false;
}
return true;
}
public void run() {
b_isRunning = true;
if(this.str_via.equals("unset"))
{
logger.info(getHostname() + " Monitoring job started");
}
else
{
logger.info(getHostname() + " (VIA: "+this.str_via+") Monitoring job started");
}
int iCurTime = getUnixtime();
int iPluginRefreshTime = last_plugin_load + 86400;
try {
// update plugins, maybe we have some new :)
// double try to load plugins if fail
if(getPluginList().size() > 1)
{
if(!is_init)
{
logger.info("[Job: " + getHostname() + "] Updating Database");
// update graphs in database too
for(MuninPlugin it_pl : getPluginList()) {
if(it_pl.getGraphs().size() > 0)
{
//logger.info(it_pl.getPluginName());
dbUpdatePluginForNode(getNode_id(),it_pl);
}
}
// delete now missing plugins
dbDeleteMissingPlugins(getNode_id(),getPluginList());
logger.info("[Job: " + getHostname() + "] Databaseupdate Done");
is_init = true;
}
else
{
if(iCurTime > iPluginRefreshTime )
{
logger.info("Refreshing Plugins on " + this.getHostname());
this.loadPlugins();
dbUpdateAllPluginsForNode(this);
}
}
}
else
{
this.loadPlugins();
}
Socket clientSocket = new Socket();
clientSocket.setSoTimeout(30000);
clientSocket.setKeepAlive(false);
clientSocket.setReuseAddress(false);
if(this.str_via.equals("unset"))
{
clientSocket.connect(new InetSocketAddress(this.getHostname(), this.getPort()),30000);
}
else
{
clientSocket.connect(new InetSocketAddress(this.getStr_via(), this.getPort()),30000);
}
lastSocket = clientSocket;
SocketCheck sc = new SocketCheck(clientSocket,getUnixtime());
if(p.getProperty("kill.sockets").equals("true"))
{
sc.setHostname(this.getHostname());
com.unbelievable.muninmxcd.v_sockets.add(sc);
}
this.i_lastRun = getUnixtime();
// update graphs for all plugins
Iterator it = this.getLoadedPlugins().iterator();
while(it.hasNext())
{
MuninPlugin l_mp = (MuninPlugin) it.next();
if(logMore)
{
logger.info(getHostname() + " fetching graphs for " + l_mp.getPluginName().toUpperCase());
}
l_mp.updateAllGraps(this.getHostname(), this.getPort(), clientSocket, getQueryInterval());
// add all graphs to insertion queue for mongodb
queuePluginFetch(l_mp.returnAllGraphs(), l_mp.getPluginName());
}
clientSocket.close();
if(p.getProperty("kill.sockets").equals("true"))
{
com.unbelievable.muninmxcd.v_sockets.remove(sc);
}
sc = null;
} catch (Exception ex) {
logger.fatal("Error in thread for host: " + getHostname() + " : " + ex.getLocalizedMessage());
ex.printStackTrace();
}
int iRunTime = getUnixtime() - iCurTime;
dbUpdateLastContact(this.getNode_id());
logger.info(getHostname() + " Monitoring job stopped - runtime: " + iRunTime);
}
/**
 * Pushes the current value of every graph of the given plugin onto the global
 * MongoDB insertion queue as one BasicDBObject per graph.
 *
 * A graph is only queued once its plugin is initialized, except for the
 * "if_err*" and "swap" plugins, which are always queued.
 *
 * @param p_almg          graphs whose current values should be queued
 * @param p_strPluginName name of the plugin the graphs belong to
 */
private void queuePluginFetch(ArrayList<MuninGraph> p_almg, String p_strPluginName)
{
    for (MuninGraph graph : p_almg)
    {
        // Build the MongoDB document describing this single graph sample.
        BasicDBObject doc = new BasicDBObject();
        doc.put("hostname", this.getHostname());
        doc.put("plugin", p_strPluginName);
        doc.put("graph", graph.getGraphName());
        doc.put("value", graph.getGraphValue().toString());
        doc.put("recv", graph.getLastGraphTime());
        doc.put("user_id", this.getUser_id());
        doc.put("nodeid", this.getNode_id());
        // Only queue if the plugin is initialized, or it is an if_err/swap plugin.
        boolean alwaysQueued = p_strPluginName.startsWith("if_err") || p_strPluginName.equals("swap");
        if (graph.isInit() || alwaysQueued)
        {
            com.unbelievable.muninmxcd.mongo_queue.add(doc);
            graph.setLastQueued(getUnixtime());
            logger.debug("Queued: " + this.getHostname() + " (" + p_strPluginName + " / " + graph.getGraphName() + ") Value: " + graph.getGraphValue());
            if (logMore)
            {
                logger.info("Queued: " + this.getHostname() + " (" + p_strPluginName + " / " + graph.getGraphName() + ") Value: " + graph.getGraphValue());
            }
        }
    }
}
/**
 * Returns the plugins currently loaded for this node.
 *
 * @return the v_loaded_plugins
 */
public CopyOnWriteArrayList<MuninPlugin> getLoadedPlugins() {
    return this.v_loaded_plugins;
}
/**
 * Replaces the list of plugins loaded for this node.
 *
 * @param plugins the v_loaded_plugins to set
 */
public void setLoadedPlugins(CopyOnWriteArrayList<MuninPlugin> plugins) {
    this.v_loaded_plugins = plugins;
}
/**
 * Returns the unix timestamp of the last frontend query against this node.
 *
 * @return the l_lastFrontendQuery
 */
public long getLastFrontendQuery() {
    return this.l_lastFrontendQuery;
}
/**
 * Sets the unix timestamp of the last frontend query against this node.
 *
 * @param timestamp the l_lastFrontendQuery to set
 */
public void setLastFrontendQuery(long timestamp) {
    this.l_lastFrontendQuery = timestamp;
}
/**
 * Sets lastFrontendQuery to the current unixtime (seconds since the epoch).
 */
public void setLastFrontendQuery()
{
    this.l_lastFrontendQuery = System.currentTimeMillis() / 1000L;
}
/**
 * Resets the current value, last value and last counter value of every graph
 * of every loaded plugin back to "0".
 */
private void cleanUpGraphs()
{
    for (MuninPlugin plugin : this.getLoadedPlugins())
    {
        for (Object element : plugin.getGraphs())
        {
            MuninGraph graph = (MuninGraph) element;
            graph.setGraphValue("0");
            graph.setLastGraphValue("0");
            graph.setLastGraphValueCounter("0");
        }
    }
}
/**
 * Returns the database id of this node.
 *
 * @return the node_id
 */
public Integer getNode_id() {
    return this.node_id;
}
/**
 * Sets the database id of this node.
 *
 * @param nodeId the node_id to set
 */
public void setNode_id(Integer nodeId) {
    this.node_id = nodeId;
}
/**
 * Returns the id of the user owning this node.
 *
 * @return the user_id
 */
public Integer getUser_id() {
    return this.user_id;
}
/**
 * Sets the id of the user owning this node.
 *
 * @param userId the user_id to set
 */
public void setUser_id(Integer userId) {
    this.user_id = userId;
}
/**
 * Returns the socket used by the most recent monitoring run.
 *
 * @return the lastSocket
 */
public Socket getLastSocket() {
    return this.lastSocket;
}
/**
 * Returns the host this node is contacted through ("unset" when connecting
 * directly to the node's own hostname).
 *
 * @return the str_via
 */
public String getStr_via() {
    return this.str_via;
}
/**
 * Sets the host this node should be contacted through.
 *
 * @param via the str_via to set
 */
public void setStr_via(String via) {
    this.str_via = via;
}
}
| added list command | src/com/unbelievable/munin/MuninNode.java | added list command |
|
Java | apache-2.0 | 5409126b6a29130460fb917be1e9a4cb34c4c4e1 | 0 | Sargul/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,dbeaver/dbeaver | package org.jkiss.dbeaver.ui.controls.resultset.valuefilter;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.layout.TableColumnLayout;
import org.eclipse.jface.viewers.ColumnLabelProvider;
import org.eclipse.jface.viewers.ColumnWeightData;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.events.VerifyEvent;
import org.eclipse.swt.events.VerifyListener;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.model.DBPDataKind;
import org.jkiss.dbeaver.model.data.DBDAttributeBinding;
import org.jkiss.dbeaver.model.data.DBDDisplayFormat;
import org.jkiss.dbeaver.model.data.DBDLabelValuePair;
import org.jkiss.dbeaver.model.exec.DBCLogicalOperator;
import org.jkiss.dbeaver.ui.controls.resultset.ResultSetRow;
import org.jkiss.dbeaver.ui.controls.resultset.ResultSetViewer;
/**
 * Drop-down style popup that lets the user pick a single distinct value of a
 * result-set attribute to filter by (uses {@link DBCLogicalOperator#IN}).
 *
 * The popup is hosted in its own {@link Shell}, which is dismissed either by
 * selecting a row or automatically when the shell is deactivated (e.g. the
 * user clicks outside of it).
 */
public class FilterValueEditMenu extends Composite{

    // Popup shell hosting the value table; created in init() and disposed on deactivate.
    private Shell shell;
    // Row picked in the table; null until the user selects something.
    private Object value;
    private Display display;
    // Helper that builds the value table and loads the distinct attribute values.
    private GenericFilterValueEdit filter;

    public FilterValueEditMenu(Composite parent, int style, @Nullable Control trigger,@NotNull ResultSetViewer viewer, @NotNull DBDAttributeBinding attr, @NotNull ResultSetRow[] rows) {
        super(parent, style);
        filter = new GenericFilterValueEdit(viewer, attr, rows, DBCLogicalOperator.IN);
        this.display = Display.getDefault();
        init(trigger);
    }

    /**
     * Creates and populates the popup shell. When a trigger control is given,
     * a mouse-down on it makes this composite visible.
     */
    private void init(Control trigger) {
        shell = new Shell(display, SWT.BORDER);
        shell.setLayout(new GridLayout());
        // Close the popup as soon as it loses activation (click outside, focus change).
        shell.addListener(SWT.Deactivate, e-> {
            if (shell != null && !shell.isDisposed()) {
                shell.setVisible(false);
                shell.dispose();
            }
        });
        createMultiValueSelector(shell);
        shell.pack();
        if(trigger != null) {
            trigger.addListener(SWT.MouseDown, e -> {
                setVisible(true);
            });
        }
    }

    public void setLocation(Point location) {
        shell.setLocation(location);
    }

    /**
     * Opens the popup and spins a nested event loop until the shell is
     * disposed (by row selection or deactivation).
     *
     * @return {@link IDialogConstants#OK_ID} when a value was selected,
     *         {@link IDialogConstants#CANCEL_ID} otherwise
     */
    public int open() {
        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) {
                display.sleep();
            }
        }
        return isAnythingSelected() ? IDialogConstants.OK_ID : IDialogConstants.CANCEL_ID;
    }

    private boolean isAnythingSelected() {
        return value != null;
    }

    /**
     * Builds the single-column, single-selection table of distinct attribute
     * values and starts loading the values asynchronously.
     */
    private void createMultiValueSelector(Composite parent) {
        Composite tableComposite = new Composite(parent, SWT.NONE);
        GridData gd = new GridData(GridData.FILL_BOTH);
        gd.widthHint = 400;
        gd.heightHint = 300;
        tableComposite.setLayoutData(gd);

        filter.setupTable(tableComposite, SWT.BORDER | SWT.SINGLE | SWT.NO_SCROLL | SWT.V_SCROLL, false, false, SWT.NONE);
        filter.table.getTable().setBackground( Display.getCurrent().getSystemColor(SWT.COLOR_WIDGET_BACKGROUND));

        TableViewerColumn resultsetColumn = new TableViewerColumn(filter.table, SWT.NONE);
        resultsetColumn.setLabelProvider(new ColumnLabelProvider() {
            @Override
            public String getText(Object element) {
                // Render each value through the attribute's own value handler.
                return filter.attr.getValueHandler().getValueDisplayString(filter.attr, ((DBDLabelValuePair)element).getValue(), DBDDisplayFormat.UI);
            }});
        resultsetColumn.getColumn().setResizable(false);

        TableColumnLayout tableLayout = new TableColumnLayout();
        tableComposite.setLayout(tableLayout);
        // Resize the column to fit the contents
        resultsetColumn.getColumn().pack();
        int resultsetWidth = resultsetColumn.getColumn().getWidth();
        // Set column to fill 100%, but with its packed width as minimum
        tableLayout.setColumnData(resultsetColumn.getColumn(), new ColumnWeightData(100, resultsetWidth));

        filter.table.addSelectionChangedListener( new ISelectionChangedListener() {
            @Override
            public void selectionChanged(SelectionChangedEvent event) {
                // Remember the chosen row and close the popup, ending open()'s loop.
                value = ((DBDLabelValuePair) event.getStructuredSelection().getFirstElement());
                shell.setVisible(false);
                shell.close();
            }
        });

        // String attributes additionally get a free-text filter box.
        if ( filter.attr.getDataKind() == DBPDataKind.STRING) {
            filter.addFilterTextbox(parent);
        }
        filter.filterPattern = null;
        filter.loadValues();
    }

    /**
     * Returns the selected value. Only valid after {@link #open()} returned
     * {@link IDialogConstants#OK_ID}; throws NPE otherwise.
     */
    public Object getValue() {
        return ((DBDLabelValuePair) value).getValue();
    }
}
| plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/valuefilter/FilterValueEditMenu.java | package org.jkiss.dbeaver.ui.controls.resultset.valuefilter;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.layout.TableColumnLayout;
import org.eclipse.jface.viewers.ColumnLabelProvider;
import org.eclipse.jface.viewers.ColumnWeightData;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.model.DBPDataKind;
import org.jkiss.dbeaver.model.data.DBDAttributeBinding;
import org.jkiss.dbeaver.model.data.DBDDisplayFormat;
import org.jkiss.dbeaver.model.data.DBDLabelValuePair;
import org.jkiss.dbeaver.model.exec.DBCLogicalOperator;
import org.jkiss.dbeaver.ui.controls.resultset.ResultSetRow;
import org.jkiss.dbeaver.ui.controls.resultset.ResultSetViewer;
/**
 * Modal drop-down style popup that lets the user pick a single distinct value
 * of a result-set attribute to filter by (uses {@link DBCLogicalOperator#IN}).
 *
 * The popup is hosted in its own application-modal {@link Shell} and is
 * dismissed either by selecting a row or by pressing ESC.
 */
public class FilterValueEditMenu extends Composite{

    // Popup shell hosting the value table; created in init().
    private Shell shell;
    // Row picked in the table; null until the user selects something.
    private Object value;
    private Display display;
    // Helper that builds the value table and loads the distinct attribute values.
    private GenericFilterValueEdit filter;

    public FilterValueEditMenu(Composite parent, int style, @Nullable Control trigger,@NotNull ResultSetViewer viewer, @NotNull DBDAttributeBinding attr, @NotNull ResultSetRow[] rows) {
        super(parent, style);
        filter = new GenericFilterValueEdit(viewer, attr, rows, DBCLogicalOperator.IN);
        this.display = Display.getDefault();
        init(trigger);
    }

    /**
     * Creates and populates the popup shell. When a trigger control is given,
     * a mouse-down on it makes this composite visible.
     */
    private void init(Control trigger) {
        shell = new Shell(display, SWT.BORDER | SWT.APPLICATION_MODAL);
        shell.setLayout(new GridLayout());
        shell.addTraverseListener(new TraverseListener() {
            @Override
            public void keyTraversed(TraverseEvent e) {
                // BUGFIX: the original body was unbraced, so shell.close() ran on
                // EVERY traversal key (TAB, arrows, ...), not only on ESC.
                if (e.detail == SWT.TRAVERSE_ESCAPE) {
                    shell.setVisible(false);
                    shell.close();
                }
            }
        });
        createMultiValueSelector(shell);
        shell.pack();
        if(trigger != null) {
            trigger.addListener(SWT.MouseDown, e -> {
                setVisible(true);
            });
        }
    }

    public void setLocation(Point location) {
        shell.setLocation(location);
    }

    /**
     * Opens the popup and spins a nested event loop until the shell is
     * disposed (by row selection or ESC).
     *
     * @return {@link IDialogConstants#OK_ID} when a value was selected,
     *         {@link IDialogConstants#CANCEL_ID} otherwise
     */
    public int open() {
        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) {
                display.sleep();
            }
        }
        return isAnythingSelected() ? IDialogConstants.OK_ID : IDialogConstants.CANCEL_ID;
    }

    private boolean isAnythingSelected() {
        return value != null;
    }

    /**
     * Builds the single-column, single-selection table of distinct attribute
     * values and starts loading the values asynchronously.
     */
    private void createMultiValueSelector(Composite parent) {
        Composite tableComposite = new Composite(parent, SWT.NONE);
        GridData gd = new GridData(GridData.FILL_BOTH);
        gd.widthHint = 400;
        gd.heightHint = 300;
        tableComposite.setLayoutData(gd);

        filter.setupTable(tableComposite, SWT.BORDER | SWT.SINGLE | SWT.NO_SCROLL | SWT.V_SCROLL, false, false, SWT.NONE);
        filter.table.getTable().setBackground( Display.getCurrent().getSystemColor(SWT.COLOR_WIDGET_BACKGROUND));

        TableViewerColumn resultsetColumn = new TableViewerColumn(filter.table, SWT.NONE);
        resultsetColumn.setLabelProvider(new ColumnLabelProvider() {
            @Override
            public String getText(Object element) {
                // Render each value through the attribute's own value handler.
                return filter.attr.getValueHandler().getValueDisplayString(filter.attr, ((DBDLabelValuePair)element).getValue(), DBDDisplayFormat.UI);
            }});
        resultsetColumn.getColumn().setResizable(false);

        TableColumnLayout tableLayout = new TableColumnLayout();
        tableComposite.setLayout(tableLayout);
        // Resize the column to fit the contents
        resultsetColumn.getColumn().pack();
        int resultsetWidth = resultsetColumn.getColumn().getWidth();
        // Set column to fill 100%, but with its packed width as minimum
        tableLayout.setColumnData(resultsetColumn.getColumn(), new ColumnWeightData(100, resultsetWidth));

        filter.table.addSelectionChangedListener( new ISelectionChangedListener() {
            @Override
            public void selectionChanged(SelectionChangedEvent event) {
                // Remember the chosen row and close the popup, ending open()'s loop.
                value = ((DBDLabelValuePair) event.getStructuredSelection().getFirstElement());
                shell.setVisible(false);
                shell.close();
            }
        });

        // String attributes additionally get a free-text filter box.
        if ( filter.attr.getDataKind() == DBPDataKind.STRING) {
            filter.addFilterTextbox(parent);
        }
        filter.filterPattern = null;
        filter.loadValues();
    }

    /**
     * Returns the selected value. Only valid after {@link #open()} returned
     * {@link IDialogConstants#OK_ID}; throws NPE otherwise.
     */
    public Object getValue() {
        return ((DBDLabelValuePair) value).getValue();
    }
}
| Close drop-down by clicking outside.
Former-commit-id: f2550eb35aad90490862820a7cd496d96f66ec97 | plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/valuefilter/FilterValueEditMenu.java | Close drop-down by clicking outside. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.