conflict_resolution (string, lengths 27 to 16k)
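Each row of this column is one conflict-resolution record: a git-style merge conflict, written with the usual `<<<<<<<` (ours), `=======` (theirs), and `>>>>>>>` markers, immediately followed by the resolution a developer committed. A record may contain several such hunks back to back. As a minimal sketch (in the same Java as the snippets themselves), here is how a record could be split into (ours, theirs, resolution) triples; the `Hunk` class and `parse` method are illustrative, not part of the dataset, and assume exactly the layout just described.

```java
import java.util.ArrayList;
import java.util.List;

public class ConflictRecordParser {

    /** One hunk: both conflicting sides plus the resolution that follows. */
    public static final class Hunk {
        public final String ours, theirs, resolution;

        Hunk(String ours, String theirs, String resolution) {
            this.ours = ours;
            this.theirs = theirs;
            this.resolution = resolution;
        }
    }

    public static List<Hunk> parse(String record) {
        List<Hunk> hunks = new ArrayList<>();
        StringBuilder ours = null, theirs = null, resolution = null;
        StringBuilder current = null;
        for (String line : record.split("\n", -1)) {
            if (line.startsWith("<<<<<<<")) {
                if (resolution != null) {  // a new hunk starts: close the previous one
                    hunks.add(new Hunk(ours.toString(), theirs.toString(), resolution.toString()));
                }
                ours = new StringBuilder();
                theirs = new StringBuilder();
                resolution = null;
                current = ours;
            } else if (line.equals("=======") && ours != null && current == ours) {
                current = theirs;          // exactly seven '=': the real separator
            } else if (line.startsWith(">>>>>>>") && theirs != null && current == theirs) {
                resolution = new StringBuilder();
                current = resolution;      // everything up to the next '<<<<<<<' is the fix
            } else if (current != null) {
                current.append(line).append('\n');
            }
        }
        if (resolution != null) {
            hunks.add(new Hunk(ours.toString(), theirs.toString(), resolution.toString()));
        }
        return hunks;
    }
}
```

Matching the separator with `equals("=======")` rather than `startsWith` matters for this data: hunk bodies below contain `//====...` comment rules, long bare runs of `=`, and in one case a stray second `=======` line, all of which this sketch keeps as body text.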
<<<<<<<
if (type == null)
throw new BadParameterEx("type", null);
=======
if (type == null) {
throw new BadParameterEx("type", type);
}
>>>>>>>
if (type == null) {
throw new BadParameterEx("type", null);
}
<<<<<<<
//--------------------------------------------------------------------------
public synchronized void init(Element node) throws BadInputEx, SchedulerException
{
=======
/**
* TODO Javadoc.
*
* @param node
* @throws BadInputEx
* @throws SchedulerException
*/
public void init(Element node) throws BadInputEx, SchedulerException {
>>>>>>>
public synchronized void init(Element node) throws BadInputEx, SchedulerException
{
<<<<<<<
private static Map<String, Class<?>> hsHarvesters = new HashMap<String, Class<?>>();
}
//=============================================================================
=======
======================================================================
=======
private static Map<String, Class> hsHarvesters = new HashMap<String, Class>();
}
>>>>>>>
private static Map<String, Class<?>> hsHarvesters = new HashMap<String, Class<?>>();
}
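This first record already shows the typical shape of a resolution: the braced block comes from one branch, the argument choice (`null`, the offending value) from the other. A self-contained sketch of the resolved idiom, with a stand-in exception class, since only the two-argument constructor of `BadParameterEx` is visible here:

```java
public class NullCheckDemo {

    // Stand-in for org.fao.geonet.exceptions.BadParameterEx: only its
    // (name, value) constructor is visible in the hunk above, so the rest
    // of this class is assumed.
    static class BadParameterEx extends RuntimeException {
        BadParameterEx(String name, Object value) {
            super("Bad parameter " + name + " : " + value);
        }
    }

    // The resolved idiom: braces from one branch, the explicit null
    // argument (the offending value) from the other.
    static void requireType(String type) {
        if (type == null) {
            throw new BadParameterEx("type", null);
        }
    }

    public static void main(String[] args) {
        requireType("metadata");                // passes silently
        try {
            requireType(null);                  // rejected
        } catch (BadParameterEx e) {
            System.out.println(e.getMessage()); // Bad parameter type : null
        }
    }
}
```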
<<<<<<<
import org.fao.geonet.services.api.exception.NoResultsFoundException;
=======
import org.fao.geonet.exceptions.UserNotFoundEx;
>>>>>>>
import org.fao.geonet.exceptions.UserNotFoundEx;
import org.fao.geonet.services.api.exception.NoResultsFoundException;
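Import conflicts like this record are resolved by keeping both branches' imports. A minimal sketch of that union merge, assuming one plain import statement per line (the resolution above also reorders the two lines, which this sketch does not attempt):

```java
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class ImportUnion {

    /** Union of both sides' import lines: duplicates dropped, first-seen order kept. */
    public static List<String> unionImports(List<String> ours, List<String> theirs) {
        Set<String> merged = new LinkedHashSet<>(ours);
        merged.addAll(theirs);
        return new ArrayList<>(merged);
    }

    public static void main(String[] args) {
        unionImports(
            List.of("import org.fao.geonet.services.api.exception.NoResultsFoundException;"),
            List.of("import org.fao.geonet.exceptions.UserNotFoundEx;"))
            .forEach(System.out::println);
    }
}
```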
<<<<<<<
import jeeves.server.context.ServiceContext;
=======
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils;
>>>>>>>
import jeeves.server.context.ServiceContext;
import org.apache.commons.lang.StringUtils;
<<<<<<<
localCateg = new CategoryMapper(context);
localGroups = new GroupMapper(context);
localUuids = new UUIDMapper(context.getBean(MetadataRepository.class), params.getUuid());
=======
localCateg = new CategoryMapper(context);
localGroups= new GroupMapper(context);
localUuids = new UUIDMapper(context.getBean(MetadataRepository.class), params.getUuid());
Pair<String, Map<String, Object>> filter =
HarvesterUtil.parseXSLFilter(params.xslfilter, log);
String processName = filter.one();
Map<String, Object> processParams = filter.two();
>>>>>>>
localCateg = new CategoryMapper(context);
localGroups = new GroupMapper(context);
localUuids = new UUIDMapper(context.getBean(MetadataRepository.class), params.getUuid());
Pair<String, Map<String, Object>> filter =
HarvesterUtil.parseXSLFilter(params.xslfilter, log);
String processName = filter.one();
Map<String, Object> processParams = filter.two();
<<<<<<<
private void addMetadata(XmlRequest t, RecordInfo ri) throws Exception {
Element md = retrieveMetadata(t, ri);
=======
private void addMetadata(XmlRequest t, RecordInfo ri, String processName, Map<String, Object> processParams) throws Exception {
Element md = retrieveMetadata(t, ri);
>>>>>>>
private void addMetadata(XmlRequest t, RecordInfo ri, String processName, Map<String, Object> processParams) throws Exception {
Element md = retrieveMetadata(t, ri);
<<<<<<<
if (log.isDebugEnabled()) log.debug(" - Adding metadata with remote id : " + ri.id);
=======
if(log.isDebugEnabled()) log.debug(" - Adding metadata with remote id : "+ ri.id);
// Apply the xsl filter chosen in the UI
if (StringUtils.isNotEmpty(params.xslfilter)) {
md = HarvesterUtil.processMetadata(dataMan.getSchema(schema),
md, processName, processParams, log);
}
>>>>>>>
if (log.isDebugEnabled()) log.debug(" - Adding metadata with remote id : " + ri.id);
// Apply the xsl filter chosen in the UI
if (StringUtils.isNotEmpty(params.xslfilter)) {
md = HarvesterUtil.processMetadata(dataMan.getSchema(schema),
md, processName, processParams, log);
}
<<<<<<<
return null;
}
}
private void updateMetadata(XmlRequest t, RecordInfo ri, String id) throws Exception {
String date = localUuids.getChangeDate(ri.id);
if (!ri.isMoreRecentThan(date)) {
if (log.isDebugEnabled())
log.debug(" - Metadata XML not changed for remote id : " + ri.id);
result.unchangedMetadata++;
} else {
if (log.isDebugEnabled())
log.debug(" - Updating local metadata for remote id : " + ri.id);
Element md = retrieveMetadata(t, ri);
if (md == null)
return;
=======
return null;
}
}
//--------------------------------------------------------------------------
//---
//--- Private methods : updateMetadata
//---
//--------------------------------------------------------------------------
private void updateMetadata(XmlRequest t, RecordInfo ri, String id, String processName, Map<String, Object> processParams) throws Exception {
String date = localUuids.getChangeDate(ri.id);
if (!ri.isMoreRecentThan(date))
{
if(log.isDebugEnabled()) log.debug(" - Metadata XML not changed for remote id : "+ ri.id);
result.unchangedMetadata++;
}
else
{
if(log.isDebugEnabled()) log.debug(" - Updating local metadata for remote id : "+ ri.id);
Element md = retrieveMetadata(t, ri);
if (md == null) {
return;
}
// The schema of the metadata
String schema = dataMan.autodetectSchema(md, null);
// Apply the xsl filter chosen in the UI
if (StringUtils.isNotEmpty(params.xslfilter)) {
md = HarvesterUtil.processMetadata(dataMan.getSchema(schema),
md, processName, processParams, log);
}
>>>>>>>
return null;
}
}
private void updateMetadata(XmlRequest t, RecordInfo ri, String id, String processName, Map<String, Object> processParams) throws Exception {
String date = localUuids.getChangeDate(ri.id);
if (!ri.isMoreRecentThan(date)) {
if (log.isDebugEnabled())
log.debug(" - Metadata XML not changed for remote id : " + ri.id);
result.unchangedMetadata++;
} else {
if (log.isDebugEnabled())
log.debug(" - Updating local metadata for remote id : " + ri.id);
Element md = retrieveMetadata(t, ri);
if (md == null)
return;
// The schema of the metadata
String schema = dataMan.autodetectSchema(md, null);
// Apply the xsl filter chosen in the UI
if (StringUtils.isNotEmpty(params.xslfilter)) {
md = HarvesterUtil.processMetadata(dataMan.getSchema(schema),
md, processName, processParams, log);
}
<<<<<<<
import static com.google.common.io.Files.getNameWithoutExtension;
import static org.fao.geonet.api.ApiParams.API_PARAM_RECORD_UUID;
import static org.fao.geonet.api.records.formatters.FormatterConstants.SCHEMA_PLUGIN_FORMATTER_DIR;
import static org.springframework.data.jpa.domain.Specifications.where;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
=======
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import jeeves.server.context.ServiceContext;
import jeeves.server.dispatchers.ServiceManager;
>>>>>>>
import static com.google.common.io.Files.getNameWithoutExtension;
import static org.fao.geonet.api.ApiParams.API_PARAM_RECORD_UUID;
import static org.fao.geonet.api.records.formatters.FormatterConstants.SCHEMA_PLUGIN_FORMATTER_DIR;
import static org.springframework.data.jpa.domain.Specifications.where;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import jeeves.server.context.ServiceContext;
import jeeves.server.dispatchers.ServiceManager;
<<<<<<<
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import jeeves.server.context.ServiceContext;
import jeeves.server.dispatchers.ServiceManager;
import springfox.documentation.annotations.ApiIgnore;
=======
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import static com.google.common.io.Files.getNameWithoutExtension;
import static org.fao.geonet.api.ApiParams.API_PARAM_RECORD_UUID;
import static org.fao.geonet.api.records.formatters.FormatterConstants.SCHEMA_PLUGIN_FORMATTER_DIR;
import static org.springframework.data.jpa.domain.Specifications.where;
>>>>>>>
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import jeeves.server.context.ServiceContext;
import jeeves.server.dispatchers.ServiceManager;
import springfox.documentation.annotations.ApiIgnore;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import static com.google.common.io.Files.getNameWithoutExtension;
import static org.fao.geonet.api.ApiParams.API_PARAM_RECORD_UUID;
import static org.fao.geonet.api.records.formatters.FormatterConstants.SCHEMA_PLUGIN_FORMATTER_DIR;
import static org.springframework.data.jpa.domain.Specifications.where;
<<<<<<<
import org.fao.geonet.Logger;
import org.fao.geonet.domain.Source;
import org.fao.geonet.exceptions.BadInputEx;
=======
import jeeves.server.resources.ResourceManager;
import org.fao.geonet.constants.Geonet;
>>>>>>>
<<<<<<<
import org.fao.geonet.repository.SourceRepository;
=======
import org.fao.geonet.kernel.harvest.harvester.HarvestResult;
import org.fao.geonet.lib.Lib;
>>>>>>>
import org.fao.geonet.repository.SourceRepository;
<<<<<<<
Harvester h = new Harvester(log, context, params);
result = h.harvest();
=======
Dbms dbms = (Dbms) rm.open(Geonet.Res.MAIN_DB);
h = new Harvester(log, context, dbms, params);
result = h.harvest(log);
>>>>>>>
Harvester h = new Harvester(log, context, params);
result = h.harvest(log);
<<<<<<<
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils;
=======
import jeeves.server.ServiceConfig;
import jeeves.server.context.ServiceContext;
>>>>>>>
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils;
import jeeves.server.ServiceConfig;
import jeeves.server.context.ServiceContext;
<<<<<<<
*
*
* @param context
* @throws IOException
* @throws JDOMException
=======
>>>>>>>
<<<<<<<
host = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_HOST);
port = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_PORT);
from = sm.getValue(Settings.SYSTEM_FEEDBACK_EMAIL);
username = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_USERNAME);
password = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_PASSWORD);
useSSL = sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_SSL);
useTLS = sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_TLS);
=======
host = sm.getValue("system/feedback/mailServer/host");
port = sm.getValue("system/feedback/mailServer/port");
from = sm.getValue("system/feedback/email");
username = sm.getValue("system/feedback/mailServer/username");
password = sm.getValue("system/feedback/mailServer/password");
useSSL = sm.getValueAsBool("system/feedback/mailServer/ssl");
useTLS = sm.getValueAsBool("system/feedback/mailServer/tls");
>>>>>>>
host = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_HOST);
port = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_PORT);
from = sm.getValue(Settings.SYSTEM_FEEDBACK_EMAIL);
username = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_USERNAME);
password = sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_PASSWORD);
useSSL = sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_SSL);
useTLS = sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_TLS);
<<<<<<<
*
* @param id The metadata id that has been edited.
=======
*
* @param id The metadata id that has been edited.
>>>>>>>
*
* @param id The metadata id that has been edited.
<<<<<<<
*
* @param status The status to set.
* @param metadataIds The set of metadata ids to set status on.
* @param changeDate The date the status was changed.
=======
*
* @param status The status to set.
* @param metadataIds The set of metadata ids to set status on.
* @param changeDate The date the status was changed.
>>>>>>>
*
* @param status The status to set.
* @param metadataIds The set of metadata ids to set status on.
* @param changeDate The date the status was changed.
<<<<<<<
// -------------------------------------------------------------------------
// Private methods
// -------------------------------------------------------------------------
/**
* Unset all operations on 'All' Group. Used when status changes from approved to something else.
*
* @param mdId The metadata id to unset privileges on
*/
private void unsetAllOperations(int mdId) throws Exception {
int allGroup = 1;
for (ReservedOperation op : ReservedOperation.values()) {
dm.forceUnsetOperation(context, mdId, allGroup, op.getId());
}
}
=======
/**
* Unset all operations on 'All' Group. Used when status changes from approved to something
* else.
*
* @param mdId The metadata id to unset privileges on
*/
private void unsetAllOperations(int mdId) throws Exception {
int allGroup = 1;
for (ReservedOperation op : ReservedOperation.values()) {
dm.forceUnsetOperation(context, mdId, allGroup, op.getId());
}
}
>>>>>>>
/**
* Unset all operations on 'All' Group. Used when status changes from approved to something
* else.
*
* @param mdId The metadata id to unset privileges on
*/
private void unsetAllOperations(int mdId) throws Exception {
int allGroup = 1;
for (ReservedOperation op : ReservedOperation.values()) {
dm.forceUnsetOperation(context, mdId, allGroup, op.getId());
}
}
<<<<<<<
*
* @param metadata The selected set of metadata records
* @param changeDate The date of the change in status
=======
*
* @param metadata The selected set of metadata records
* @param changeDate The date of the change in status
>>>>>>>
*
* @param metadata The selected set of metadata records
* @param changeDate The date of the change in status
<<<<<<<
public static final Pattern metadataLuceneField = Pattern.compile("\\{\\{index:([^\\}]+)\\}\\}");
=======
>>>>>>>
<<<<<<<
String statusMetadataDetails = null;
String message = "";
try {
statusMetadataDetails = LangUtils.translate(context.getApplicationContext(), "statusMetadataDetails").get(this.language);
} catch (Exception e) {}
// Fallback on a default value if statusMetadataDetails not resolved
if (statusMetadataDetails == null) {
statusMetadataDetails = "* {{index:title}} ({{serverurl}}/catalog.search#/metadata/{{index:_uuid}})";
}
ArrayList<String> fields = new ArrayList<String>();
Matcher m = metadataLuceneField.matcher(statusMetadataDetails);
=======
String statusMetadataDetails = null;
String message = "";
try {
statusMetadataDetails = LangUtils.translate(context.getApplicationContext(), "statusMetadataDetails").get(this.language);
} catch (Exception e) {
}
// Fallback on a default value if statusMetadataDetails not resolved
if (statusMetadataDetails == null) {
statusMetadataDetails = "* {{index:title}} ({{serverurl}}/catalog.search#/metadata/{{index:_uuid}})";
}
ArrayList<String> fields = new ArrayList<String>();
Matcher m = metadataLuceneField.matcher(statusMetadataDetails);
>>>>>>>
String statusMetadataDetails = null;
String message = "";
try {
statusMetadataDetails = LangUtils.translate(context.getApplicationContext(), "statusMetadataDetails").get(this.language);
} catch (Exception e) {
}
// Fallback on a default value if statusMetadataDetails not resolved
if (statusMetadataDetails == null) {
statusMetadataDetails = "* {{index:title}} ({{serverurl}}/catalog.search#/metadata/{{index:_uuid}})";
}
ArrayList<String> fields = new ArrayList<String>();
Matcher m = metadataLuceneField.matcher(statusMetadataDetails);
<<<<<<<
while (m.find()) {
fields.add(m.group(1));
}
for (Metadata md : mds) {
String curMdDetails = statusMetadataDetails;
// First substitution for variables not stored in the index
curMdDetails = curMdDetails.replace("{{serverurl}}", siteUrl);
for (String f: fields) {
String mdf = XslUtil.getIndexField(null, md.getUuid(), f, this.language);
curMdDetails = curMdDetails.replace("{{index:" + f + "}}", mdf);
}
message = message.concat(curMdDetails + "\r\n");
}
return message;
}
private String getTranslatedStatusName(String statusValueId) {
=======
while (m.find()) {
fields.add(m.group(1));
}
for (Metadata md : mds) {
String curMdDetails = statusMetadataDetails;
// First substitution for variables not stored in the index
curMdDetails = curMdDetails.replace("{{serverurl}}", siteUrl);
for (String f : fields) {
String mdf = XslUtil.getIndexField(null, md.getUuid(), f, this.language);
curMdDetails = curMdDetails.replace("{{index:" + f + "}}", mdf);
}
message = message.concat(curMdDetails + "\r\n");
}
return message;
}
private String getTranslatedStatusName(String statusValueId) {
>>>>>>>
while (m.find()) {
fields.add(m.group(1));
}
for (Metadata md : mds) {
String curMdDetails = statusMetadataDetails;
// First substitution for variables not stored in the index
curMdDetails = curMdDetails.replace("{{serverurl}}", siteUrl);
for (String f : fields) {
String mdf = XslUtil.getIndexField(null, md.getUuid(), f, this.language);
curMdDetails = curMdDetails.replace("{{index:" + f + "}}", mdf);
}
message = message.concat(curMdDetails + "\r\n");
}
return message;
}
private String getTranslatedStatusName(String statusValueId) {
<<<<<<<
*
* @param metadataIds The selected set of metadata records
* @param changeDate The date of the change in status
=======
*
* @param metadataIds The selected set of metadata records
* @param changeDate The date of the change in status
>>>>>>>
*
* @param metadataIds The selected set of metadata records
* @param changeDate The date of the change in status
<<<<<<<
*
* @param users The selected set of users
* @param subject Subject to be used for email notices
* @param status The status being set
* @param changeDate Datestamp of status change
=======
*
* @param users The selected set of users
* @param subject Subject to be used for email notices
* @param status The status being set
* @param changeDate Datestamp of status change
>>>>>>>
*
* @param users The selected set of users
* @param subject Subject to be used for email notices
* @param status The status being set
* @param changeDate Datestamp of status change
<<<<<<<
*
* @param sendTo The recipient email address
* @param subject Subject to be used for email notices
* @param status The status being set on the records
* @param changeDate Datestamp of status change
=======
*
* @param sendTo The recipient email address
* @param subject Subject to be used for email notices
* @param status The status being set on the records
* @param changeDate Datestamp of status change
>>>>>>>
*
* @param sendTo The recipient email address
* @param subject Subject to be used for email notices
* @param status The status being set on the records
* @param changeDate Datestamp of status change
<<<<<<<
public class XmlSearch implements Service {
private ServiceConfig _config;
private String _searchFast; //true, false, index
//--------------------------------------------------------------------------
//---
//--- Init
//---
//--------------------------------------------------------------------------
public void init(Path appPath, ServiceConfig config) throws Exception {
_config = config;
_searchFast = config.getValue(Geonet.SearchResult.FAST, "true");
}
/**
* Run a search and return results as XML.
*
* @param params All search parameters defined in {@link LuceneIndexField}. <br/> To return only
results summary, set summaryOnly parameter to 1. Default is 0 (i.e. results and
* summary).
*/
public Element exec(Element params, ServiceContext context) throws Exception {
GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
SearchManager searchMan = gc.getBean(SearchManager.class);
Element elData = SearchDefaults.getDefaultSearch(context, params);
String sRemote = elData.getChildText(Geonet.SearchResult.REMOTE);
boolean remote = sRemote != null && sRemote.equals(Geonet.Text.ON);
// possibly close old searcher
UserSession session = context.getUserSession();
// perform the search and save search result into session
MetaSearcher searcher;
context.info("Creating searchers");
if (remote) {
searcher = searchMan.newSearcher(SearcherType.Z3950, Geonet.File.SEARCH_Z3950_CLIENT);
} else {
searcher = searchMan.newSearcher(SearcherType.LUCENE, Geonet.File.SEARCH_LUCENE);
}
try {
// Check if user asked for summary only without building summary
String summaryOnly = Util.getParam(params, Geonet.SearchResult.SUMMARY_ONLY, "0");
String sBuildSummary = params.getChildText(Geonet.SearchResult.BUILD_SUMMARY);
if (sBuildSummary != null && sBuildSummary.equals("false") && !"0".equals(summaryOnly)) {
elData.getChild(Geonet.SearchResult.BUILD_SUMMARY).setText("true");
=======
public class XmlSearch implements Service
{
private ServiceConfig _config;
private String _searchFast; //true, false, index
//--------------------------------------------------------------------------
//---
//--- Init
//---
//--------------------------------------------------------------------------
public void init(Path appPath, ServiceConfig config) throws Exception
{
_config = config;
_searchFast = config.getValue(Geonet.SearchResult.FAST, "true");
}
/**
* Run a search and return results as XML.
*
* @param params All search parameters defined in {@link LuceneIndexField}.
* <br/>
* To return only results summary, set summaryOnly parameter to 1.
* Default is 0 (i.e. results and summary).
*
*/
public Element exec(Element params, ServiceContext context) throws Exception {
GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
SearchManager searchMan = gc.getBean(SearchManager.class);
Element elData = SearchDefaults.getDefaultSearch(context, params);
// possibly close old searcher
UserSession session = context.getUserSession();
// perform the search and save search result into session
MetaSearcher searcher = searchMan.newSearcher(SearcherType.LUCENE, Geonet.File.SEARCH_LUCENE);
try {
// Check if user asked for summary only without building summary
String summaryOnly = Util.getParam(params, Geonet.SearchResult.SUMMARY_ONLY, "0");
String sBuildSummary = params.getChildText(Geonet.SearchResult.BUILD_SUMMARY);
if(sBuildSummary != null && sBuildSummary.equals("false") && ! "0".equals(summaryOnly)) {
elData.getChild(Geonet.SearchResult.BUILD_SUMMARY).setText("true");
>>>>>>>
public class XmlSearch implements Service {
private ServiceConfig _config;
private String _searchFast; //true, false, index
//--------------------------------------------------------------------------
//---
//--- Init
//---
//--------------------------------------------------------------------------
public void init(Path appPath, ServiceConfig config) throws Exception {
_config = config;
_searchFast = config.getValue(Geonet.SearchResult.FAST, "true");
}
/**
* Run a search and return results as XML.
*
* @param params All search parameters defined in {@link LuceneIndexField}. <br/> To return only
results summary, set summaryOnly parameter to 1. Default is 0 (i.e. results and
* summary).
*/
public Element exec(Element params, ServiceContext context) throws Exception {
GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
SearchManager searchMan = gc.getBean(SearchManager.class);
Element elData = SearchDefaults.getDefaultSearch(context, params);
// possibly close old searcher
UserSession session = context.getUserSession();
// perform the search and save search result into session
MetaSearcher searcher = searchMan.newSearcher(SearcherType.LUCENE, Geonet.File.SEARCH_LUCENE);
try {
// Check if user asked for summary only without building summary
String summaryOnly = Util.getParam(params, Geonet.SearchResult.SUMMARY_ONLY, "0");
String sBuildSummary = params.getChildText(Geonet.SearchResult.BUILD_SUMMARY);
if (sBuildSummary != null && sBuildSummary.equals("false") && !"0".equals(summaryOnly)) {
elData.getChild(Geonet.SearchResult.BUILD_SUMMARY).setText("true");
<<<<<<<
=======
public static final String SYSTEM_SITE_SITE_ID_PATH = "system/site/siteId";
public static final String SYSTEM_SITE_NAME_PATH = "system/site/name";
public static final String SYSTEM_SITE_LABEL_PREFIX = "system/site/labels/";
public static final String CSW_TRANSACTION_XPATH_UPDATE_CREATE_NEW_ELEMENTS = "system/csw/transactionUpdateCreateXPath";
public static final String SYSTEM_PROXY_USE = "system/proxy/use";
public static final String SYSTEM_PROXY_HOST = "system/proxy/host";
public static final String SYSTEM_PROXY_PORT = "system/proxy/port";
public static final String SYSTEM_PROXY_USERNAME = "system/proxy/username";
public static final String SYSTEM_PROXY_PASSWORD = "system/proxy/password";
public static final String SYSTEM_LUCENE_IGNORECHARS = "system/requestedLanguage/ignorechars";
public static final String SYSTEM_REQUESTED_LANGUAGE_SORTED = "system/requestedLanguage/sorted";
public static final String SYSTEM_REQUESTED_LANGUAGE_ONLY = "system/requestedLanguage/only";
public static final String SYSTEM_AUTODETECT_ENABLE = "system/autodetect/enable";
public static final String SYSTEM_XLINKRESOLVER_ENABLE = "system/xlinkResolver/enable";
public static final String SYSTEM_SERVER_LOG = "system/server/log";
public static final String SYSTEM_INSPIRE_ENABLE = "system/inspire/enable";
public static final String SYSTEM_INSPIRE_ATOM = "system/inspire/atom";
public static final String SYSTEM_INSPIRE_ATOM_SCHEDULE = "system/inspire/atomSchedule";
public static final String SYSTEM_PREFER_GROUP_LOGO = "system/metadata/prefergrouplogo";
public static final String ENABLE_ALL_THESAURUS = "system/metadata/allThesaurus";
>>>>>>>
<<<<<<<
*
* @param key The setting key
=======
*
* @param key The setting key
>>>>>>>
*
* @param key The setting key
<<<<<<<
*
* @param key the path/name/key of the setting.
=======
*
* @param key the path/name/key of the setting.
>>>>>>>
*
* @param key the path/name/key of the setting.
<<<<<<<
*
=======
>>>>>>>
<<<<<<<
*
* @param key the key/path/name of the setting.
=======
*
* @param key the key/path/name of the setting.
>>>>>>>
*
* @param key the key/path/name of the setting.
<<<<<<<
*
* @throws SQLException
=======
>>>>>>>
<<<<<<<
setValue(Settings.SYSTEM_SITE_SITE_ID_PATH, siteUuid);
=======
setValue(SYSTEM_SITE_SITE_ID_PATH, siteUuid);
>>>>>>>
setValue(Settings.SYSTEM_SITE_SITE_ID_PATH, siteUuid);
<<<<<<<
String protocol = getValue(Settings.SYSTEM_SERVER_PROTOCOL);
String host = getValue(Settings.SYSTEM_SERVER_HOST);
String port = getValue(Settings.SYSTEM_SERVER_PORT);
String locServ = baseURL +"/"+ nodeInfo.getId() +"/";
=======
String protocol = getValue(Geonet.Settings.SERVER_PROTOCOL);
String host = getValue(Geonet.Settings.SERVER_HOST);
String port = getValue(Geonet.Settings.SERVER_PORT);
String locServ = baseURL + "/" + nodeInfo.getId() + "/";
>>>>>>>
String protocol = getValue(Settings.SYSTEM_SERVER_PROTOCOL);
String host = getValue(Settings.SYSTEM_SERVER_HOST);
String port = getValue(Settings.SYSTEM_SERVER_PORT);
String locServ = baseURL + "/" + nodeInfo.getId() + "/";
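This record, and the feedback-mail and organization records elsewhere in this section, resolve the same way: the committed side prefers named constants over literal setting paths. Because the hunks pair each constant with the literal the other branch still used, the implied fragment of the constants class can be reconstructed. This is a sketch only; the real `org.fao.geonet.kernel.setting.Settings` class certainly defines more keys, and anything not shown in a hunk is omitted rather than guessed.

```java
// Sketch of the fragment of Settings implied by the hunks in this section.
// Each value is the literal path the other branch still used.
public final class Settings {
    public static final String SYSTEM_SITE_SITE_ID_PATH = "system/site/siteId";
    public static final String SYSTEM_SITE_ORGANIZATION = "system/site/organization";
    public static final String SYSTEM_FEEDBACK_EMAIL = "system/feedback/email";
    public static final String SYSTEM_FEEDBACK_MAILSERVER_HOST = "system/feedback/mailServer/host";
    public static final String SYSTEM_FEEDBACK_MAILSERVER_PORT = "system/feedback/mailServer/port";
    public static final String SYSTEM_FEEDBACK_MAILSERVER_USERNAME = "system/feedback/mailServer/username";
    public static final String SYSTEM_FEEDBACK_MAILSERVER_PASSWORD = "system/feedback/mailServer/password";
    public static final String SYSTEM_FEEDBACK_MAILSERVER_SSL = "system/feedback/mailServer/ssl";
    public static final String SYSTEM_FEEDBACK_MAILSERVER_TLS = "system/feedback/mailServer/tls";

    private Settings() { }
}
```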
<<<<<<<
public ObjectReader at(String value) {
return new ObjectReader(this, new JsonPointerBasedFilter(value));
=======
public ObjectReader at(final String pointerExpr) {
_assertNotNull("pointerExpr", pointerExpr);
return new ObjectReader(this, new JsonPointerBasedFilter(pointerExpr));
>>>>>>>
public ObjectReader at(String pointerExpr) {
_assertNotNull("pointerExpr", pointerExpr);
return new ObjectReader(this, new JsonPointerBasedFilter(pointerExpr));
<<<<<<<
public ObjectReader at(JsonPointer pointer) {
=======
public ObjectReader at(final JsonPointer pointer) {
_assertNotNull("pointer", pointer);
>>>>>>>
public ObjectReader at(JsonPointer pointer) {
_assertNotNull("pointer", pointer);
<<<<<<<
protected JsonParser treeAsTokens(JsonNode n, DeserializationContext ctxt) {
return new TreeTraversingParser(n, ctxt);
=======
@Override
public JsonParser treeAsTokens(TreeNode n) {
_assertNotNull("n", n);
// 05-Dec-2017, tatu: Important! Must clear "valueToUpdate" since we do not
// want update to be applied here, as a side effect
ObjectReader codec = withValueToUpdate(null);
return new TreeTraversingParser((JsonNode) n, codec);
>>>>>>>
protected JsonParser treeAsTokens(JsonNode n, DeserializationContext ctxt) {
_assertNotNull("n", n);
return new TreeTraversingParser(n, ctxt);
<<<<<<<
return (T) _bindAsTreeOrNull(createDeserializationContext(p), p);
=======
_assertNotNull("p", p);
return (T) _bindAsTreeOrNull(p);
}
@Override
public void writeTree(JsonGenerator g, TreeNode rootNode) {
throw new UnsupportedOperationException();
>>>>>>>
_assertNotNull("p", p);
return (T) _bindAsTreeOrNull(createDeserializationContext(p), p);
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return (T) _detectBindAndClose(_dataFormatReaders.findFormat(src), false);
}
return (T) _bindAndClose(_considerFilter(_parserFactory.createParser(src), false));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
_reportUndetectableSource(src);
}
return (T) _bindAndClose(_considerFilter(_parserFactory.createParser(src), false));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return (T) _detectBindAndClose(_dataFormatReaders.findFormat(_inputStream(src)), true);
}
return (T) _bindAndClose(_considerFilter(_parserFactory.createParser(src), false));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return (T) _detectBindAndClose(_dataFormatReaders.findFormat(_inputStream(src)), true);
}
return (T) _bindAndClose(_considerFilter(_parserFactory.createParser(src), false));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return (T) _bindAndClose(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndCloseAsTree(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, in), false));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return _detectBindAndCloseAsTree(src);
}
return _bindAndCloseAsTree(_considerFilter(_parserFactory.createParser(src), false));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndCloseAsTree(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndCloseAsTree(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, r), false));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
_reportUndetectableSource(src);
}
return _bindAndCloseAsTree(_considerFilter(_parserFactory.createParser(src), false));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndCloseAsTree(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndCloseAsTree(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, json, offset, len), false));
=======
_assertNotNull("json", json);
if (_dataFormatReaders != null) {
_reportUndetectableSource(json);
}
return _bindAndCloseAsTree(_considerFilter(_parserFactory.createParser(json, offset, len), false));
>>>>>>>
_assertNotNull("content", content);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndCloseAsTree(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, content, offset, len), false));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return _detectBindAndReadValues(_dataFormatReaders.findFormat(src), false);
}
return _bindAndReadValues(_considerFilter(_parserFactory.createParser(src), true));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
JsonParser p = _considerFilter(_parserFactory.createParser(ctxt, src), true);
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
_reportUndetectableSource(src);
}
JsonParser p = _considerFilter(_parserFactory.createParser(src), true);
DeserializationContext ctxt = createDeserializationContext(p);
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
JsonParser p = _considerFilter(_parserFactory.createParser(ctxt, src), true);
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return _detectBindAndReadValues(
_dataFormatReaders.findFormat(_inputStream(src)), false);
}
return _bindAndReadValues(_considerFilter(_parserFactory.createParser(src), true));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
return _detectBindAndReadValues(
_dataFormatReaders.findFormat(_inputStream(src)), true);
}
return _bindAndReadValues(_considerFilter(_parserFactory.createParser(src), true));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
<<<<<<<
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
=======
_assertNotNull("src", src);
if (_dataFormatReaders != null) {
_reportUndetectableSource(src);
}
return _bindAndReadValues(_considerFilter(_parserFactory.createParser(src), true));
>>>>>>>
_assertNotNull("src", src);
DefaultDeserializationContext ctxt = createDeserializationContext();
return _bindAndReadValues(ctxt,
_considerFilter(_parserFactory.createParser(ctxt, src), true));
<<<<<<<
=======
/**
* @since 2.10
*/
protected final JavaType _jsonNodeType() {
JavaType t = _jsonNodeType;
if (t == null) {
t = getTypeFactory().constructType(JsonNode.class);
_jsonNodeType = t;
}
return t;
}
protected final void _assertNotNull(String paramName, Object src) {
if (src == null) {
throw new IllegalArgumentException(String.format("argument \"%s\" is null", paramName));
}
}
>>>>>>>
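Every resolution in this Jackson record converges on the same merged shape: the fail-fast `_assertNotNull` argument check from one branch plus the eagerly created `DefaultDeserializationContext` from the other. The helper itself appears verbatim at the end of the record; a self-contained sketch of the idiom it implements:

```java
public final class Args {

    private Args() { }

    // Body mirrors the _assertNotNull helper shown at the end of the record above.
    static void assertNotNull(String paramName, Object value) {
        if (value == null) {
            throw new IllegalArgumentException(String.format("argument \"%s\" is null", paramName));
        }
    }

    public static void main(String[] args) {
        assertNotNull("src", new Object());     // passes
        try {
            assertNotNull("src", null);         // fails fast
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // argument "src" is null
        }
    }
}
```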
<<<<<<<
@EntityListeners(GroupEntityListenerManager.class)
=======
@SequenceGenerator(name=Group.ID_SEQ_NAME, initialValue=100, allocationSize=1)
>>>>>>>
@EntityListeners(GroupEntityListenerManager.class)
@SequenceGenerator(name=Group.ID_SEQ_NAME, initialValue=100, allocationSize=1)
<<<<<<<
=======
static final String ID_SEQ_NAME = "group_id_seq";
>>>>>>>
static final String ID_SEQ_NAME = "group_id_seq";
<<<<<<<
for (OperationAllowed opAllowed : opsAllowed) {
if (opAllowed.getId().getOperationId() != ReservedOperation.notify.getId())
continue;
=======
for (OperationAllowed opAllowed : opsAllowed) {
if (opAllowed.getId().getOperationId() != ReservedOperation.notify.getId())
continue;
>>>>>>>
for (OperationAllowed opAllowed : opsAllowed) {
if (opAllowed.getId().getOperationId() != ReservedOperation.notify.getId())
continue;
<<<<<<<
String name = group.getName();
String email = group.getEmail();
if (email != null && email.trim().length() != 0)
{
// TODO i18n
String subject = "File " + fname + " has been downloaded";
String message = "GeoNetwork notifies you, as contact person of group "+ name
+ " that data file "+ fname
+ " belonging metadata "+ id
+ " has beed downloaded from address " + context.getIpAddress() + ".";
try
{
MailSender sender = new MailSender(context);
sender.send(host, Integer.parseInt(port),
sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_USERNAME),
sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_PASSWORD),
sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_SSL),
sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_TLS),
from, fromDescr, email, null, subject, message);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
}
}
=======
String name = group.getName();
String email = group.getEmail();
if (email != null && email.trim().length() != 0) {
// TODO i18n
String subject = "File " + fname + " has been downloaded";
String message = "GeoNetwork notifies you, as contact person of group " + name
+ " that data file " + fname
+ " belonging metadata " + id
+ " has beed downloaded from address " + context.getIpAddress() + ".";
try {
MailSender sender = new MailSender(context);
sender.send(host, Integer.parseInt(port),
sm.getValue("system/feedback/mailServer/username"),
sm.getValue("system/feedback/mailServer/password"),
sm.getValueAsBool("system/feedback/mailServer/ssl"),
sm.getValueAsBool("system/feedback/mailServer/tls"),
from, fromDescr, email, null, subject, message);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
}
>>>>>>>
String name = group.getName();
String email = group.getEmail();
if (email != null && email.trim().length() != 0) {
// TODO i18n
String subject = "File " + fname + " has been downloaded";
String message = "GeoNetwork notifies you, as contact person of group " + name
+ " that data file " + fname
+ " belonging metadata " + id
+ " has beed downloaded from address " + context.getIpAddress() + ".";
try {
MailSender sender = new MailSender(context);
sender.send(host, Integer.parseInt(port),
sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_USERNAME),
sm.getValue(Settings.SYSTEM_FEEDBACK_MAILSERVER_PASSWORD),
sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_SSL),
sm.getValueAsBool(Settings.SYSTEM_FEEDBACK_MAILSERVER_TLS),
from, fromDescr, email, null, subject, message);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
}
<<<<<<<
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
=======
>>>>>>>
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
<<<<<<<
//---------------------------------------------------------------------------
public String getName()
{
return schemaName;
}
=======
public void setName(String inName) {
schemaName = inName;
this.schemaPlugin = SchemaManager.getSchemaPlugin(schemaName);
}
>>>>>>>
public void setName(String inName) {
schemaName = inName;
this.schemaPlugin = SchemaManager.getSchemaPlugin(schemaName);
}
<<<<<<<
/**
* Get schema directory
*
* @return
*/
@JsonIgnore
public Path getSchemaDir() {
return schemaDir;
}
/**
* Set schema directory
*
* @param schemaDir
*/
public void setSchemaDir(Path schemaDir) {
this.schemaDir = schemaDir;
}
//---------------------------------------------------------------------------
public void setPrimeNS(String theNS)
{
primeNS = theNS;
}
//---------------------------------------------------------------------------
@JsonProperty(value = "targetNamespace")
public String getPrimeNS()
{
return primeNS;
}
//---------------------------------------------------------------------------
public MetadataType getTypeInfo(String type)
{
Logger.log();
if (hmTypes.get(type) == null) return new MetadataType();
else return hmTypes.get(type);
}
//---------------------------------------------------------------------------
public String getElementType(String elem,String parent) throws Exception
{
// two cases here - if we have just one element (or a substitute) with
// this name then return its type
Logger.log();
List<String> childType = hmElements.get(elem);
if (childType == null) {
// Check and see whether we can substitute another element from the
// substitution link
String oldelem = elem;
elem = hmSubsLink.get(elem);
Logger.log();
childType = hmElements.get(elem);
if (childType == null) {
Log.warning(Geonet.SCHEMA_MANAGER, "ERROR: Mismatch between schema and xml: No type for 'element' : "
+ oldelem + " with parent " + parent + ". Returning xs:string");
return "xs:string";
}
}
if (childType.size() == 1) return childType.get(0);
Logger.log();
// OTHERWISE get the type by examining the parent:
// for each parent with that name parent
// 1. retrieve its mdt
List<String> exType = hmElements.get(parent);
if (exType == null) return "xs:string";
=======
/**
* Get schema directory
*/
public Path getSchemaDir() {
return schemaDir;
}
/**
* Set schema directory
*/
public void setSchemaDir(Path schemaDir) {
this.schemaDir = schemaDir;
}
//---------------------------------------------------------------------------
public String getPrimeNS() {
return primeNS;
}
//---------------------------------------------------------------------------
public void setPrimeNS(String theNS) {
primeNS = theNS;
}
//---------------------------------------------------------------------------
public MetadataType getTypeInfo(String type) {
Logger.log();
if (hmTypes.get(type) == null) return new MetadataType();
else return hmTypes.get(type);
}
//---------------------------------------------------------------------------
public String getElementType(String elem, String parent) throws Exception {
// two cases here - if we have just one element (or a substitute) with
// this name then return its type
Logger.log();
List<String> childType = hmElements.get(elem);
if (childType == null) {
// Check and see whether we can substitute another element from the
// substitution link
String oldelem = elem;
elem = hmSubsLink.get(elem);
Logger.log();
childType = hmElements.get(elem);
if (childType == null) {
Log.warning(Geonet.SCHEMA_MANAGER, "ERROR: Mismatch between schema and xml: No type for 'element' : "
+ oldelem + " with parent " + parent + ". Returning xs:string");
return "xs:string";
}
}
if (childType.size() == 1) return childType.get(0);
Logger.log();
// OTHERWISE get the type by examining the parent:
// for each parent with that name parent
// 1. retrieve its mdt
List<String> exType = hmElements.get(parent);
if (exType == null) return "xs:string";
>>>>>>>
/**
* Get schema directory
*/
@JsonIgnore
public Path getSchemaDir() {
return schemaDir;
}
/**
* Set schema directory
*/
public void setSchemaDir(Path schemaDir) {
this.schemaDir = schemaDir;
}
//---------------------------------------------------------------------------
@JsonProperty(value = "targetNamespace")
public String getPrimeNS() {
return primeNS;
}
//---------------------------------------------------------------------------
public void setPrimeNS(String theNS) {
primeNS = theNS;
}
//---------------------------------------------------------------------------
public MetadataType getTypeInfo(String type) {
Logger.log();
if (hmTypes.get(type) == null) return new MetadataType();
else return hmTypes.get(type);
}
//---------------------------------------------------------------------------
public String getElementType(String elem, String parent) throws Exception {
// two cases here - if we have just one element (or a substitute) with
// this name then return its type
Logger.log();
List<String> childType = hmElements.get(elem);
if (childType == null) {
// Check and see whether we can substitute another element from the
// substitution link
String oldelem = elem;
elem = hmSubsLink.get(elem);
Logger.log();
childType = hmElements.get(elem);
if (childType == null) {
Log.warning(Geonet.SCHEMA_MANAGER, "ERROR: Mismatch between schema and xml: No type for 'element' : "
+ oldelem + " with parent " + parent + ". Returning xs:string");
return "xs:string";
}
}
if (childType.size() == 1) return childType.get(0);
Logger.log();
// OTHERWISE get the type by examining the parent:
// for each parent with that name parent
// 1. retrieve its mdt
List<String> exType = hmElements.get(parent);
if (exType == null) return "xs:string";
<<<<<<<
public List<String> getElementValues(String elem,String parent) throws Exception
{
String type = getElementType(elem,parent);
String restricName = elem;
if (type != null) restricName = restricName+"+"+type;
// two cases here - if we have just one element with this name
// then return its values
List<List<String>> childValues = hmRestric.get(restricName);
if (childValues == null) return null;
if (childValues.size() == 1) return childValues.get(0);
// OTHERWISE we don't know what to do so return the first one anyway! This
// should not happen....
Logger.log();
return childValues.get(0);
}
//---------------------------------------------------------------------------
//---
//--- Package protected API methods
//---
//---------------------------------------------------------------------------
void addElement(String name, String type, List<String> alValues, List<String> alSubs, String subLink)
{
// first just add the subs - because these are for global elements we
// never have a clash because global elements are all in the same scope
// and are thus unique
if (alSubs != null && alSubs.size() > 0) hmSubs.put(name,alSubs);
if (subLink != null && subLink.length() > 0) hmSubsLink.put(name,subLink);
List<String> exType = hmElements.get(name);
// it's already there but the type has been added already
if (exType != null && exType.contains(type)) return;
// it's already there but doesn't have this type
if (exType != null && !(exType.contains(type))) {
Logger.log();
// it's not there so add a new list
} else {
hmElements.put(name, exType = new ArrayList<String>());
}
exType.add(type);
String restricName = name;
if (type != null) restricName = name+"+"+type;
// it's already there
List<List<String>> exValues = hmRestric.get(restricName);
if (exValues != null) {
Logger.log();
// it's not there so add a new list of lists
} else {
hmRestric .put(restricName, exValues = new ArrayList<List<String>>());
}
exValues.add(alValues);
}
//---------------------------------------------------------------------------
public void addType(String name, MetadataType mdt)
{
mdt.setName(name);
hmTypes.put(name, mdt);
}
//---------------------------------------------------------------------------
public void addNS(String targetNSPrefix, String targetNSUri)
{
Namespace ns = Namespace.getNamespace(targetNSPrefix, targetNSUri);
hmNameSpaces.put(targetNSPrefix, ns);
hmPrefixes.put(targetNSUri, ns);
}
//---------------------------------------------------------------------------
@JsonIgnore
public String getNS(String targetNSPrefix)
{
Namespace ns = hmNameSpaces.get(targetNSPrefix);
if (ns != null) {
return ns.getURI();
} else {
return null;
}
}
=======
public void addType(String name, MetadataType mdt) {
mdt.setName(name);
hmTypes.put(name, mdt);
}
//---------------------------------------------------------------------------
public void addNS(String targetNSPrefix, String targetNSUri) {
Namespace ns = Namespace.getNamespace(targetNSPrefix, targetNSUri);
hmNameSpaces.put(targetNSPrefix, ns);
hmPrefixes.put(targetNSUri, ns);
}
//---------------------------------------------------------------------------
public String getNS(String targetNSPrefix) {
Namespace ns = hmNameSpaces.get(targetNSPrefix);
if (ns != null) {
return ns.getURI();
} else {
return null;
}
}
>>>>>>>
public void addType(String name, MetadataType mdt) {
mdt.setName(name);
hmTypes.put(name, mdt);
}
//---------------------------------------------------------------------------
public void addNS(String targetNSPrefix, String targetNSUri) {
Namespace ns = Namespace.getNamespace(targetNSPrefix, targetNSUri);
hmNameSpaces.put(targetNSPrefix, ns);
hmPrefixes.put(targetNSUri, ns);
}
//---------------------------------------------------------------------------
@JsonIgnore
public String getNS(String targetNSPrefix) {
Namespace ns = hmNameSpaces.get(targetNSPrefix);
if (ns != null) {
return ns.getURI();
} else {
return null;
}
}
<<<<<<<
//---------------------------------------------------------------------------
@JsonIgnore
public List<Namespace> getSchemaNS()
{
return new ArrayList<Namespace>(hmPrefixes.values());
}
=======
//---------------------------------------------------------------------------
public List<Namespace> getSchemaNS() {
return new ArrayList<Namespace>(hmPrefixes.values());
}
>>>>>>>
//---------------------------------------------------------------------------
@JsonIgnore
public List<Namespace> getSchemaNS() {
return new ArrayList<Namespace>(hmPrefixes.values());
}
<<<<<<<
if(Log.isDebugEnabled(Geonet.SCHEMA_MANAGER)) {
=======
if (Log.isDebugEnabled(Geonet.SCHEMA_MANAGER)) {
>>>>>>>
if (Log.isDebugEnabled(Geonet.SCHEMA_MANAGER)) {
<<<<<<<
}
/**
* Return the list of schematron rules to apply for this schema
* @return
*/
public String[] getSchematronRules() {
if(schematronRules != null) {
return this.schematronRules.clone() ;
} else {
return new String[]{};
}
}
private void setSchematronRules(String[] schematronRules) {
if(schematronRules != null) {
this.schematronRules = schematronRules.clone();
}
}
// -- this info for profile detection methods
public void setRootAppInfoElements(List<Element> rootAppInfoElements) {
this.rootAppInfoElements = rootAppInfoElements;
}
@JsonIgnore
public List<Element> getSchemaAppInfoElements() {
return rootAppInfoElements;
}
/**
* true if schema requires to synch the uuid column schema info
* with the uuid in the metadata record (updated on editing or in UFO).
*
* @return
*/
public boolean isReadwriteUUID() {
return readwriteUUID;
}
public void setReadwriteUUID(boolean readwriteUUID) {
this.readwriteUUID = readwriteUUID;
}
=======
}
>>>>>>>
}
<<<<<<<
private static final Random RANDOM = new Random();
public static String randomId() {
return "N" + RANDOM.nextInt(Integer.MAX_VALUE);
}
=======
public static String getMax(Object values) {
String[] strings = values.toString().split(" ");
String max = "";
for (int i = 0; i < strings.length; i++) {
String val = strings[i];
if(val.compareTo(max) > 0) {
max = val;
}
}
return max;
}
>>>>>>>
private static final Random RANDOM = new Random();
public static String randomId() {
return "N" + RANDOM.nextInt(Integer.MAX_VALUE);
}
public static String getMax(Object values) {
String[] strings = values.toString().split(" ");
String max = "";
for (int i = 0; i < strings.length; i++) {
String val = strings[i];
if(val.compareTo(max) > 0) {
max = val;
}
}
return max;
}
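This record keeps both utility methods rather than choosing a side. One caveat worth a quick check: `getMax` compares tokens with `String.compareTo`, so the maximum is lexicographic; that is fine for equal-width ISO dates but not for arbitrary numbers. A self-contained demonstration, with the method body copied from the resolution above:

```java
public class GetMaxDemo {

    // Copied from the resolution above: lexicographic max of space-separated tokens.
    public static String getMax(Object values) {
        String[] strings = values.toString().split(" ");
        String max = "";
        for (int i = 0; i < strings.length; i++) {
            String val = strings[i];
            if (val.compareTo(max) > 0) {
                max = val;
            }
        }
        return max;
    }

    public static void main(String[] args) {
        System.out.println(getMax("2019-01-01 2020-12-31 2018-06-15")); // 2020-12-31
        System.out.println(getMax("9 10 2")); // 9, not 10: compareTo is lexicographic
    }
}
```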
<<<<<<<
@Component(CatalogService.BEAN_PREFIX+GetCapabilities.NAME)
public class GetCapabilities extends AbstractOperation implements CatalogService
{
//---------------------------------------------------------------------------
//---
//--- Constructor
//---
//---------------------------------------------------------------------------
=======
@Component(CatalogService.BEAN_PREFIX + GetCapabilities.NAME)
public class GetCapabilities extends AbstractOperation implements CatalogService {
//---------------------------------------------------------------------------
//---
//--- Constructor
//---
//---------------------------------------------------------------------------
>>>>>>>
@Component(CatalogService.BEAN_PREFIX + GetCapabilities.NAME)
public class GetCapabilities extends AbstractOperation implements CatalogService {
//---------------------------------------------------------------------------
//---
//--- Constructor
//---
//---------------------------------------------------------------------------
<<<<<<<
vars.put("$PROTOCOL", sm.getValue(Settings.SYSTEM_SERVER_PROTOCOL));
vars.put("$HOST", sm.getValue(Settings.SYSTEM_SERVER_HOST));
String port = sm.getValue(Settings.SYSTEM_SERVER_PORT);
vars.put("$PORT", "80".equals(port) ? "" : ":" + port);
=======
vars.put("$PROTOCOL", sm.getValue(Geonet.Settings.SERVER_PROTOCOL));
vars.put("$HOST", sm.getValue(Geonet.Settings.SERVER_HOST));
String port = sm.getValue(Geonet.Settings.SERVER_PORT);
vars.put("$PORT", "80".equals(port) ? "" : ":" + port);
>>>>>>>
vars.put("$PROTOCOL", sm.getValue(Settings.SYSTEM_SERVER_PROTOCOL));
vars.put("$HOST", sm.getValue(Settings.SYSTEM_SERVER_HOST));
String port = sm.getValue(Settings.SYSTEM_SERVER_PORT);
vars.put("$PORT", "80".equals(port) ? "" : ":" + port);
<<<<<<<
String providerName = sm.getValue(Settings.SYSTEM_SITE_ORGANIZATION);
vars.put("$PROVIDER_NAME", StringUtils.isNotEmpty(providerName)?providerName:"GeoNetwork opensource");
=======
String providerName = sm.getValue("system/site/organization");
vars.put("$PROVIDER_NAME", StringUtils.isNotEmpty(providerName) ? providerName : "GeoNetwork opensource");
>>>>>>>
String providerName = sm.getValue(Settings.SYSTEM_SITE_ORGANIZATION);
vars.put("$PROVIDER_NAME", StringUtils.isNotEmpty(providerName) ? providerName : "GeoNetwork opensource"); |
<<<<<<<
@EntityListeners(CswCapabilitiesInfoFieldEntityListenerManager.class)
=======
@SequenceGenerator(name=CswCapabilitiesInfoField.ID_SEQ_NAME, initialValue=100, allocationSize=1)
>>>>>>>
@EntityListeners(CswCapabilitiesInfoFieldEntityListenerManager.class)
@SequenceGenerator(name=CswCapabilitiesInfoField.ID_SEQ_NAME, initialValue=100, allocationSize=1) |
<<<<<<<
import org.fao.geonet.kernel.setting.SettingManager;
import org.fao.geonet.utils.Xml;
import org.fao.geonet.exceptions.BadParameterEx;
import org.fao.geonet.exceptions.OperationAbortedEx;
=======
>>>>>>>
<<<<<<<
import org.fao.geonet.utils.*;
=======
>>>>>>>
<<<<<<<
import org.fao.geonet.languages.IsoLanguagesMapper;
=======
import org.fao.geonet.kernel.setting.SettingManager;
>>>>>>>
import org.fao.geonet.kernel.setting.SettingManager;
import org.fao.geonet.languages.IsoLanguagesMapper;
<<<<<<<
final IsoLanguagesMapper isoLanguageMapper = context.getBean(IsoLanguagesMapper.class);
Thesaurus gst = new Thesaurus(isoLanguageMapper, fname, type, dir, newFile, siteURL);
=======
Thesaurus gst = new Thesaurus(context.getApplicationContext(), fname, type, dir, path, siteURL);
>>>>>>>
final IsoLanguagesMapper isoLanguageMapper = context.getBean(IsoLanguagesMapper.class);
Thesaurus gst = new Thesaurus(isoLanguageMapper, fname, type, dir, path, siteURL); |
<<<<<<<
import org.fao.geonet.repository.SourceRepository;
=======
import org.fao.geonet.kernel.harvest.harvester.HarvestResult;
import org.fao.geonet.lib.Lib;
>>>>>>>
import org.fao.geonet.repository.SourceRepository;
<<<<<<<
Harvester h = new Harvester(log, context, params);
result = h.harvest();
=======
Dbms dbms = (Dbms) rm.open(Geonet.Res.MAIN_DB);
h = new Harvester(log, context, dbms, params);
result = h.harvest(log);
>>>>>>>
Harvester h = new Harvester(log, context, params);
result = h.harvest(log); |
<<<<<<<
@EntityListeners(value = {UserEntityListenerManager.class})
=======
@SequenceGenerator(name=User.ID_SEQ_NAME, initialValue=100, allocationSize=1)
>>>>>>>
@EntityListeners(value = {UserEntityListenerManager.class})
@SequenceGenerator(name=User.ID_SEQ_NAME, initialValue=100, allocationSize=1) |
<<<<<<<
settingMan.add("id:"+siteId, "capabUrl", params.capabUrl);
settingMan.add("id:"+siteId, "icon", params.icon);
settingMan.add("id:"+siteId, "rejectDuplicateResource", params.rejectDuplicateResource);
=======
settingMan.add(dbms, "id:"+siteId, "capabUrl", params.capabUrl);
settingMan.add(dbms, "id:"+siteId, "icon", params.icon);
settingMan.add(dbms, "id:"+siteId, "rejectDuplicateResource", params.rejectDuplicateResource);
settingMan.add(dbms, "id:"+siteId, "queryScope", params.queryScope);
settingMan.add(dbms, "id:"+siteId, "hopCount", params.hopCount);
>>>>>>>
settingMan.add("id:"+siteId, "capabUrl", params.capabUrl);
settingMan.add("id:"+siteId, "icon", params.icon);
settingMan.add("id:"+siteId, "rejectDuplicateResource", params.rejectDuplicateResource);
settingMan.add("id:"+siteId, "queryScope", params.queryScope);
settingMan.add("id:"+siteId, "hopCount", params.hopCount); |
<<<<<<<
import com.google.common.annotations.VisibleForTesting;
=======
>>>>>>>
import com.google.common.annotations.VisibleForTesting;
<<<<<<<
=======
import java.io.File;
>>>>>>> |
<<<<<<<
try{
=======
try {
>>>>>>>
try {
<<<<<<<
boolean localRating = settingManager.getValueAsBool(Settings.SYSTEM_LOCALRATING_ENABLE, false);
=======
boolean localRating = settingManager.getValueAsBool("system/localrating/enable", false);
>>>>>>>
boolean localRating = settingManager.getValueAsBool(Settings.SYSTEM_LOCALRATING_ENABLE, false);
<<<<<<<
log.info("End of alignment for : "+ params.getName());
=======
log.info("End of alignment for : " + params.getName());
>>>>>>>
log.info("End of alignment for : " + params.getName());
<<<<<<<
//--------------------------------------------------------------------------
//---
//--- Private methods : updateMetadata
//---
//--------------------------------------------------------------------------
private void updateMetadata(final RecordInfo ri, final String id, final boolean localRating,
final boolean useChangeDate, String localChangeDate) throws Exception
{
final Element md[] = { null };
final Element publicFiles[] = { null };
final Element privateFiles[] = { null };
=======
private void updateMetadata(final RecordInfo ri, final String id, final boolean localRating,
final boolean useChangeDate, String localChangeDate) throws Exception {
final Element md[] = {null};
final Element publicFiles[] = {null};
final Element privateFiles[] = {null};
>>>>>>>
private void updateMetadata(final RecordInfo ri, final String id, final boolean localRating,
final boolean useChangeDate, String localChangeDate) throws Exception {
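// One-element arrays act as mutable holders so the anonymous IMEFVisitor callbacks below can assign results from inner-class scope.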
final Element md[] = {null};
final Element publicFiles[] = {null};
final Element privateFiles[] = {null};
<<<<<<<
try
{
=======
try {
>>>>>>>
try {
<<<<<<<
MEFLib.visit(mefFile, visitor, new IMEFVisitor()
{
public void handleMetadata(Element mdata, int index) throws Exception
{
=======
MEFLib.visit(mefFile, visitor, new IMEFVisitor() {
public void handleMetadata(Element mdata, int index) throws Exception {
>>>>>>>
MEFLib.visit(mefFile, visitor, new IMEFVisitor() {
public void handleMetadata(Element mdata, int index) throws Exception {
<<<<<<<
public void handleMetadataFiles(DirectoryStream<Path> files, Element info, int index) throws Exception
{
=======
public void handleMetadataFiles(DirectoryStream<Path> files, Element info, int index) throws Exception {
>>>>>>>
public void handleMetadataFiles(DirectoryStream<Path> files, Element info, int index) throws Exception {
<<<<<<<
}
public void handleInfo(Element info, int index) throws Exception
{
=======
public void handleInfo(Element info, int index) throws Exception {
>>>>>>>
public void handleInfo(Element info, int index) throws Exception {
<<<<<<<
public void handlePublicFile(String file, String changeDate, InputStream is, int index) throws IOException
{
=======
public void handlePublicFile(String file, String changeDate, InputStream is, int index) throws IOException {
>>>>>>>
public void handlePublicFile(String file, String changeDate, InputStream is, int index) throws IOException {
<<<<<<<
log.warning("Unable to delete mefFile: "+mefFile);
}
=======
log.warning("Unable to delete mefFile: " + mefFile);
}
>>>>>>>
log.warning("Unable to delete mefFile: " + mefFile);
}
<<<<<<<
if (file != null &&
file.getFileName() != null &&
infoFiles != null &&
!existsFile(file.getFileName().toString(), infoFiles)) {
=======
if (file != null &&
file.getFileName() != null &&
infoFiles != null &&
!existsFile(file.getFileName().toString(), infoFiles)) {
>>>>>>>
if (file != null &&
file.getFileName() != null &&
infoFiles != null &&
!existsFile(file.getFileName().toString(), infoFiles)) {
<<<<<<<
//--------------------------------------------------------------------------
//---
//--- Variables
//---
//--------------------------------------------------------------------------
private Logger log;
private ServiceContext context;
private XmlRequest request;
private GeonetParams params;
private DataManager dataMan;
private HarvestResult result;
private CategoryMapper localCateg;
private GroupMapper localGroups;
private UUIDMapper localUuids;
private String processName;
private String preferredSchema;
private Map<String, Object> processParams = new HashMap<String, Object>();
private HashMap<String, HashMap<String, String>> hmRemoteGroups = new HashMap<String, HashMap<String, String>>();
=======
>>>>>>> |
<<<<<<<
List<Pair<String, Pair<Document, List<CategoryPath>>>> docs = buildIndexDocument(schemaDir, metadata, id, moreFields, metadataType, false);
=======
List<Pair<String, Pair<Document, Collection<CategoryPath>>>> docs = buildIndexDocument(schemaDir, metadata, id, moreFields, metadataType, title, false);
>>>>>>>
List<Pair<String, Pair<Document, Collection<CategoryPath>>>> docs = buildIndexDocument(schemaDir, metadata, id, moreFields, metadataType, false);
<<<<<<<
private List<Pair<String,Pair<Document, List<CategoryPath>>>> buildIndexDocument(String schemaDir, Element metadata, String id,
List<Element> moreFields, MetadataType metadataType, boolean group) throws Exception
=======
private List<Pair<String,Pair<Document, Collection<CategoryPath>>>> buildIndexDocument(String schemaDir, Element metadata, String id,
List<Element> moreFields, MetadataType metadataType, String title,
boolean group) throws Exception
>>>>>>>
private List<Pair<String,Pair<Document, Collection<CategoryPath>>>> buildIndexDocument(String schemaDir, Element metadata, String id,
List<Element> moreFields, MetadataType metadataType,
boolean group) throws Exception |
<<<<<<<
import jeeves.server.sources.http.ServletPathFinder;
import org.fao.geonet.NodeInfo;
=======
>>>>>>>
<<<<<<<
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.servlet.ServletContext;
=======
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
>>>>>>>
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.servlet.ServletContext; |
<<<<<<<
=======
import org.apache.log4j.Priority;
import org.fao.geonet.GeonetContext;
>>>>>>>
import org.apache.log4j.Priority;
<<<<<<<
public Element removeHiddenElements(boolean isIndexingTask, Metadata metadata) throws Exception {
String id = String.valueOf(metadata.getId());
Element metadataXml = metadata.getXmlData(false);
if (!isIndexingTask) {
=======
logEmptyWithheld(id, metadataXml, "XmlSerializer.internalSelect", isIndexingTask);
if (!isIndexingTask) {
>>>>>>>
public Element removeHiddenElements(boolean isIndexingTask, Metadata metadata) throws Exception {
String id = String.valueOf(metadata.getId());
Element metadataXml = metadata.getXmlData(false);
logEmptyWithheld(id, metadataXml, "XmlSerializer.internalSelect", isIndexingTask);
if (!isIndexingTask) {
<<<<<<<
}
return metadataXml;
}
=======
}
return (Element) metadataXml.detach();
}
private static final List<Namespace> XML_SELECT_NAMESPACE = Arrays.asList(Geonet.Namespaces.GCO, Geonet.Namespaces.GMD);
private static final String WITHHELD = "withheld";
@SuppressWarnings("serial")
private static final Filter EMPTY_WITHHELD = new Filter() {
@Override
public boolean matches(Object obj) {
if (obj instanceof Element) {
Element elem = (Element) obj;
String withheld = elem.getAttributeValue("nilReason", Geonet.Namespaces.GCO);
if(WITHHELD.equalsIgnoreCase(withheld) && elem.getChildren().size() == 0 && elem.getTextTrim().isEmpty()) {
return true;
}
}
return false;
}
};
private boolean logEmptyWithheld(String id, Element metadata, String methodName, boolean isIndexingTask) {
if (isLoggingEmptyWithHeld()) {
if (Log.isEnabledFor(Geonet.DATA_MANAGER, Priority.WARN_INT)) {
Iterator<?> emptyWithheld = metadata.getDescendants(EMPTY_WITHHELD);
if (emptyWithheld.hasNext()) {
StringBuilder withheld = new StringBuilder();
while (emptyWithheld.hasNext()) {
Element next = (Element) emptyWithheld.next();
withheld.append("\n ");
xpath(withheld, next);
}
Log.warning(Geonet.DATA_MANAGER, "[" + WITHHELD + "] " +
"In method [" + methodName + "] Metadata id=" + id +
" has withheld elements that don't contain any data: " + withheld +
". Is indexing: " + isIndexingTask);
>>>>>>>
}
return metadataXml;
}
private static final List<Namespace> XML_SELECT_NAMESPACE = Arrays.asList(Geonet.Namespaces.GCO, Geonet.Namespaces.GMD);
private static final String WITHHELD = "withheld";
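// Filter matching elements flagged gco:nilReason="withheld" that have no child elements and no text content.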
@SuppressWarnings("serial")
private static final Filter EMPTY_WITHHELD = new Filter() {
@Override
public boolean matches(Object obj) {
if (obj instanceof Element) {
Element elem = (Element) obj;
String withheld = elem.getAttributeValue("nilReason", Geonet.Namespaces.GCO);
if (WITHHELD.equalsIgnoreCase(withheld) && elem.getChildren().size() == 0 && elem.getTextTrim().isEmpty()) {
return true;
}
}
return false;
}
};
private boolean logEmptyWithheld(String id, Element metadata, String methodName, boolean isIndexingTask) {
if (isLoggingEmptyWithHeld()) {
if (Log.isEnabledFor(Geonet.DATA_MANAGER, Priority.WARN_INT)) {
Iterator<?> emptyWithheld = metadata.getDescendants(EMPTY_WITHHELD);
if (emptyWithheld.hasNext()) {
StringBuilder withheld = new StringBuilder();
while (emptyWithheld.hasNext()) {
Element next = (Element) emptyWithheld.next();
withheld.append("\n ");
xpath(withheld, next);
}
Log.warning(Geonet.DATA_MANAGER, "[" + WITHHELD + "] " +
"In method [" + methodName + "] Metadata id=" + id +
" has withheld elements that don't contain any data: " + withheld +
". Is indexing: " + isIndexingTask); |
<<<<<<<
public void doHarvest(Logger log) throws Exception {
Harvester h = new Harvester(log, context, params);
serverResults = h.harvest();
=======
protected void doHarvest(Logger log, ResourceManager rm) throws Exception {
Dbms dbms = (Dbms) rm.open(Geonet.Res.MAIN_DB);
h = new Harvester(log, context, dbms, params);
result = h.harvest(log);
>>>>>>>
public void doHarvest(Logger log) throws Exception {
Harvester h = new Harvester(log, context, params);
serverResults = h.harvest(log); |
<<<<<<<
=======
// TODO add group to user
//String group = getHeader(req, config.getGroupKey(), "");
>>>>>>>
// TODO add group to user
//String group = getHeader(req, config.getGroupKey(), "");
<<<<<<<
=======
// TODO add group to user
//if (group.equals("")) {
// group = config.getDefaultGroup();
//}
>>>>>>>
// TODO add group to user
//if (group.equals("")) {
// group = config.getDefaultGroup();
//} |
<<<<<<<
=======
import jeeves.utils.Util;
import jeeves.utils.Xml;
import org.eclipse.emf.common.command.AbortExecutionException;
>>>>>>>
import org.eclipse.emf.common.command.AbortExecutionException;
<<<<<<<
public HarvestResult harvest() throws Exception {
ListIdentifiersRequest req = new ListIdentifiersRequest(context.getBean(GeonetHttpRequestFactory.class));
=======
public HarvestResult harvest(Logger log) throws Exception
{
this.log = log;
ListIdentifiersRequest req = new ListIdentifiersRequest();
>>>>>>>
public HarvestResult harvest(Logger log) throws Exception {
this.log = log;
ListIdentifiersRequest req = new ListIdentifiersRequest(context.getBean(GeonetHttpRequestFactory.class));
<<<<<<<
XmlRequest t = req.getTransport();
t.setUrl(new URL(params.url));
=======
Transport t = req.getTransport();
try {
t.setUrl(new URL(params.url));
} catch (MalformedURLException e1) {
HarvestError harvestError = new HarvestError(e1, log);
harvestError.setDescription(harvestError.getDescription() + " " + params.url);
errors.add(harvestError);
throw new AbortExecutionException(e1);
}
>>>>>>>
XmlRequest t = req.getTransport();
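// A malformed harvester URL is recorded as a HarvestError and aborts the entire run.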
try {
t.setUrl(new URL(params.url));
} catch (MalformedURLException e1) {
HarvestError harvestError = new HarvestError(e1, log);
harvestError.setDescription(harvestError.getDescription() + " " + params.url);
errors.add(harvestError);
throw new AbortExecutionException(e1);
} |
<<<<<<<
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
=======
import org.apache.commons.lang.StringUtils;
>>>>>>>
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils;
<<<<<<<
boolean localRating = settingManager.getValueAsBool(Settings.SYSTEM_LOCALRATING_ENABLE, false);
final MetadataRepository metadataRepository = context.getBean(MetadataRepository.class);
=======
String localRating = settingManager.getValue(Settings.SYSTEM_LOCALRATING_ENABLE);
>>>>>>>
String localRating = settingManager.getValue(Settings.SYSTEM_LOCALRATING_ENABLE);
final MetadataRepository metadataRepository = context.getBean(MetadataRepository.class);
<<<<<<<
final IMetadataUtils metadataRepository = context.getBean(IMetadataUtils.class);
final IMetadataManager metadataManager = context.getBean(IMetadataManager.class);
AbstractMetadata metadata;
if (!ri.isMoreRecentThan(date)) {
=======
final MetadataRepository metadataRepository = context.getBean(MetadataRepository.class);
Metadata metadata;
if (!force && !ri.isMoreRecentThan(date)) {
>>>>>>>
final IMetadataUtils metadataRepository = context.getBean(IMetadataUtils.class);
final IMetadataManager metadataManager = context.getBean(IMetadataManager.class);
AbstractMetadata metadata;
if (!force && !ri.isMoreRecentThan(date)) { |
<<<<<<<
SettingManager settingMan = context.getApplicationContext().getBean(SettingManager.class);
// --- Migrate database if an old one is found
migrateDatabase(servletContext, dbms, settingMan, version, subVersion, context.getAppPath());
=======
SettingManager settingMan = null;
HarvesterSettingsManager harvesterSettingsMan = null;
try {
settingMan = new SettingManager(dbms, context.getProviderManager());
harvesterSettingsMan = new HarvesterSettingsManager(dbms, context.getProviderManager());
} catch (Exception e) {
logger.info(" Failed to initialize setting managers. This is probably due to bad Settings table. Error is: " +
e.getMessage() + ". In case of database migration, the setting managers will be reinitialized.");
}
>>>>>>>
SettingManager settingMan = new SettingManager(dbms, context.getProviderManager());
HarvesterSettingsManager harvesterSettingsMan = new HarvesterSettingsManager(dbms, context.getProviderManager());
// --- Migrate database if an old one is found
migrateDatabase(servletContext, dbms, settingMan, version, subVersion, context.getAppPath());
<<<<<<<
=======
HarvestManager harvestMan = new HarvestManager(context, gnContext, harvesterSettingsMan, dataMan);
>>>>>>>
HarvestManager harvestMan = new HarvestManager(context, gnContext, harvesterSettingsMan, dataMan);
<<<<<<<
beanFactory.registerSingleton("geonetworkDataManager", dataMan);
beanFactory.registerSingleton("geonetworkSearchManager", searchMan);
beanFactory.registerSingleton("geonetworkSchemaManager", schemaMan);
beanFactory.registerSingleton("geonetworkServiceHandlerConfig", handlerConfig);
beanFactory.registerSingleton("geonetworkOaipmhDisatcher", oaipmhDis);
beanFactory.registerSingleton("geonetworkMetadataNotifierManager", metadataNotifierMan);
beanFactory.registerSingleton("geonetworkSvnManager", svnManager);
beanFactory.registerSingleton("geonetworkThesaurusManager", thesaurusMan);
beanFactory.registerSingleton("geonetworkXmlSerializer", xmlSerializer);
//------------------------------------------------------------------------
//--- initialize harvesting subsystem
logger.info(" - Harvest manager...");
HarvestManager harvestMan = new HarvestManager(context, gnContext, settingMan, dataMan);
beanFactory.registerSingleton("geonetworkHarvestManager", harvestMan);
=======
beanFactory.registerSingleton("accessManager", accessMan);
beanFactory.registerSingleton("dataManager", dataMan);
beanFactory.registerSingleton("searchManager", searchMan);
beanFactory.registerSingleton("schemaManager", schemaMan);
beanFactory.registerSingleton("serviceHandlerConfig", handlerConfig);
beanFactory.registerSingleton("settingManager", settingMan);
beanFactory.registerSingleton("harvesterSettingsMan", harvesterSettingsMan);
beanFactory.registerSingleton("thesaurusManager", thesaurusMan);
beanFactory.registerSingleton("oaipmhDisatcher", oaipmhDis);
beanFactory.registerSingleton("metadataNotifierManager", metadataNotifierMan);
beanFactory.registerSingleton("svnManager", svnManager);
beanFactory.registerSingleton("xmlSerializer", xmlSerializer);
beanFactory.registerSingleton("harvestManager", harvestMan);
>>>>>>>
beanFactory.registerSingleton("accessManager", accessMan);
beanFactory.registerSingleton("dataManager", dataMan);
beanFactory.registerSingleton("searchManager", searchMan);
beanFactory.registerSingleton("schemaManager", schemaMan);
beanFactory.registerSingleton("serviceHandlerConfig", handlerConfig);
beanFactory.registerSingleton("settingManager", settingMan);
beanFactory.registerSingleton("thesaurusManager", thesaurusMan);
beanFactory.registerSingleton("oaipmhDisatcher", oaipmhDis);
beanFactory.registerSingleton("metadataNotifierManager", metadataNotifierMan);
beanFactory.registerSingleton("svnManager", svnManager);
beanFactory.registerSingleton("xmlSerializer", xmlSerializer);
beanFactory.registerSingleton("harvestManager", harvestMan);
<<<<<<<
settingMan.refresh();
DatabaseMigrationTask task = (DatabaseMigrationTask) Class.forName(className).newInstance();
task.update(settingMan, dbms);
=======
// In 2.11, settingsManager was not able to initialized on previous
// version db table due to structure changes
if (settingMan != null && harvesterSettingsMan != null) {
settingMan.refresh(dbms);
DatabaseMigrationTask task = (DatabaseMigrationTask) Class.forName(className).newInstance();
task.update(settingMan, harvesterSettingsMan, dbms);
}
>>>>>>>
// In 2.11, the settings manager could not be initialized on a previous
// version's db table due to structure changes
if (settingMan != null && harvesterSettingsMan != null) {
settingMan.refresh();
DatabaseMigrationTask task = (DatabaseMigrationTask) Class.forName(className).newInstance();
task.update(settingMan, harvesterSettingsMan, dbms);
}
<<<<<<<
settingMan.refresh();
=======
if (settingMan != null) {
settingMan.refresh(dbms);
} else {
// Reinitialized settings
settingMan = new SettingManager(dbms, context.getProviderManager());
// Update the logo
String siteId = settingMan.getValue("system/site/siteId");
initLogo(servletContext, dbms, siteId, context.getAppPath());
}
>>>>>>>
if (settingMan != null) {
settingMan.refresh();
} else {
// Reinitialize settings
settingMan = new SettingManager(dbms, context.getProviderManager());
// Update the logo
String siteId = settingMan.getValue("system/site/siteId");
initLogo(servletContext, dbms, siteId, context.getAppPath());
} |
<<<<<<<
import org.fao.geonet.repository.AbstractSpringDataTest;
import org.fao.geonet.repository.SourceRepository;
import org.fao.geonet.repository.UserRepository;
=======
import org.fao.geonet.kernel.search.LuceneConfig;
import org.fao.geonet.kernel.search.SearchManager;
import org.fao.geonet.kernel.search.index.DirectoryFactory;
import org.fao.geonet.kernel.search.spatial.SpatialIndexWriter;
import org.fao.geonet.kernel.setting.SettingManager;
import org.fao.geonet.languages.LanguageDetector;
import org.fao.geonet.repository.AbstractSpringDataTest;
import org.fao.geonet.repository.SourceRepository;
import org.fao.geonet.repository.UserRepository;
import org.fao.geonet.util.ThreadUtils;
import org.fao.geonet.utils.BinaryFile;
>>>>>>>
import org.fao.geonet.repository.AbstractSpringDataTest;
import org.fao.geonet.repository.SourceRepository;
import org.fao.geonet.repository.UserRepository;
<<<<<<<
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
=======
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.sql.DataSource;
>>>>>>>
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
<<<<<<<
public void setup() throws Exception {
testFixture.setup(this);
=======
public void configureAppContext() throws Exception {
synchronized (AbstractCoreIntegrationTest.class) {
setUpDataDirectory();
if (!_dataDirLockFile.exists()) {
FileUtils.touch(_dataDirLockFile);
_dataDirLockFile.deleteOnExit();
}
}
System.setProperty(LuceneConfig.USE_NRT_MANAGER_REOPEN_THREAD, Boolean.toString(true));
// clear out datastore
for (Name name : _datastore.getNames()) {
((FeatureStore<?, ?>) _datastore.getFeatureSource(name)).removeFeatures(Filter.INCLUDE);
}
final String initializedString = "initialized";
final String webappDir = getWebappDir(getClass());
_applicationContext.getBean(GeonetMockServletContext.class).setTestClass(getClass());
LanguageDetector.init(webappDir + _applicationContext.getBean(Geonet.Config.LANGUAGE_PROFILES_DIR, String.class));
final GeonetworkDataDirectory geonetworkDataDirectory = _applicationContext.getBean(GeonetworkDataDirectory.class);
final SyncReport syncReport = synchronizeDataDirectory(
new File(webappDir, "WEB-INF/data"));
final ArrayList<Element> params = getServiceConfigParameterElements();
final ServiceConfig serviceConfig = new ServiceConfig(params);
try {
_applicationContext.getBean(initializedString);
} catch (NoSuchBeanDefinitionException e) {
SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
AttributeDescriptor geomDescriptor = new AttributeTypeBuilder().crs(DefaultGeographicCRS.WGS84).binding(MultiPolygon.class)
.buildDescriptor("the_geom");
builder.setName("spatialIndex");
builder.add(geomDescriptor);
builder.add(SpatialIndexWriter._IDS_ATTRIBUTE_NAME, String.class);
_datastore.createSchema(builder.buildFeatureType());
_applicationContext.getBeanFactory().registerSingleton("serviceConfig", serviceConfig);
_applicationContext.getBeanFactory().registerSingleton(initializedString, initializedString);
}
NodeInfo nodeInfo = _applicationContext.getBean(NodeInfo.class);
nodeInfo.setId(getGeonetworkNodeId());
nodeInfo.setDefaultNode(isDefaultNode());
TransformerFactoryFactory.init("net.sf.saxon.TransformerFactoryImpl");
geonetworkDataDirectory.init("geonetwork", webappDir, _dataDirectory.getAbsolutePath(),
serviceConfig, null);
_directoryFactory.resetIndex();
final String schemaPluginsDir = geonetworkDataDirectory.getSchemaPluginsDir().getPath();
final String resourcePath = geonetworkDataDirectory.getResourcesDir().getPath();
final SchemaManager schemaManager = _applicationContext.getBean(SchemaManager.class);
if (syncReport.updateSchemaManager || !schemaManager.existsSchema("iso19139")) {
new File(_dataDirectory, "config/schemaplugin-uri-catalog.xml").delete();
final String schemaPluginsCatalogFile = new File(schemaPluginsDir, "/schemaplugin-uri-catalog.xml").getPath();
deploySchema(webappDir, schemaPluginsDir);
_applicationContext.getBean(LuceneConfig.class).configure("WEB-INF/config-lucene.xml");
SchemaManager.registerXmlCatalogFiles(webappDir, schemaPluginsCatalogFile);
schemaManager.configure(_applicationContext, webappDir, resourcePath,
schemaPluginsCatalogFile, schemaPluginsDir, "eng", "iso19139", true);
}
assertTrue(schemaManager.existsSchema("iso19139"));
assertTrue(schemaManager.existsSchema("iso19115"));
assertTrue(schemaManager.existsSchema("dublin-core"));
_applicationContext.getBean(SearchManager.class).init(false, false, "", 100);
_applicationContext.getBean(DataManager.class).init(createServiceContext(), false);
String siteUuid = _dataDirectory.getName();
_applicationContext.getBean(SettingManager.class).setSiteUuid(siteUuid);
final SourceRepository sourceRepository = _applicationContext.getBean(SourceRepository.class);
List<Source> sources = sourceRepository.findAll();
if (sources.isEmpty()) {
sources = new ArrayList<Source>(1);
sources.add(sourceRepository.save(new Source().setLocal(true).setName("Name").setUuid(siteUuid)));
}
final DataSource dataSource = _applicationContext.getBean(DataSource.class);
Connection conn = null;
try {
conn = dataSource.getConnection();
ThreadUtils.init(conn.getMetaData().getURL(), _applicationContext.getBean(SettingManager.class));
} finally {
if (conn != null) {
conn.close();
}
}
}
private void setUpDataDirectory() {
if (_dataDirLockFile != null && _dataDirLockFile.exists() &&
_dataDirLockFile.lastModified() < twoHoursAgo()) {
_dataDirLockFile.delete();
}
if (_dataDirectory == null || _dataDirLockFile.exists()) {
File dir = getClassFile(getClass()).getParentFile();
final String pathToTargetDir = "core/target";
while(!new File(dir, pathToTargetDir).exists()) {
dir = dir.getParentFile();
}
dir = new File(dir, pathToTargetDir+"/integration-test-datadirs");
int i = 0;
while (new File(dir.getPath()+i, DATA_DIR_LOCK_NAME).exists() && new File(dir.getPath()+i, DATA_DIR_LOCK_NAME).exists()) {
i++;
}
while (!new File(dir.getPath()+i).exists() && !new File(dir.getPath()+i).mkdirs()) {
i++;
if (i > 1000) {
throw new Error("Unable to make test data directory");
}
}
_dataDirContainer = new File(dir.getPath()+i);
_dataDirectory = new File(_dataDirContainer, "defaultDataDir");
_dataDirLockFile = new File(_dataDirContainer, DATA_DIR_LOCK_NAME);
}
}
private long twoHoursAgo() {
final Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.HOUR_OF_DAY, -2);
return calendar.getTimeInMillis();
}
private SyncReport synchronizeDataDirectory(File srcDataDir) throws IOException {
SyncReport report = new SyncReport();
boolean deleteNewFilesFromDataDir = _dataDirectory.exists();
final TreeTraverser<File> fileTreeTraverser = Files.fileTreeTraverser();
if (deleteNewFilesFromDataDir ) {
final int prefixPathLength2 = _dataDirectory.getPath().length();
for (File dataDirFile : fileTreeTraverser.postOrderTraversal(_dataDirectory)) {
String relativePath = dataDirFile.getPath().substring(prefixPathLength2);
final File srcFile = new File(srcDataDir, relativePath);
if (!srcFile.exists()) {
if (srcFile.getParent().endsWith("schematron") &&
relativePath.contains("schema_plugins") &&
relativePath.endsWith(".xsl")) {
// don't copy because the schematron xsl files are generated.
// normally they shouldn't be here because they don't need to be in the
// repository but some tests can generate them into the schematrons folder
// so ignore them here.
continue;
}
if (relativePath.contains("/removed/")) {
// Ignore removed files which may contains MEF files
continue;
}
if (relativePath.endsWith("schemaplugin-uri-catalog.xml")) {
// we will handle this special case later.
continue;
}
if (relativePath.contains("resources" + File.separator + "xml" + File.separator + "schemas")) {
// the schemas xml directory is copied by schema manager but since it is schemas we can reuse the directory.
continue;
}
if (dataDirFile.isFile() || dataDirFile.list().length == 0) {
if (!dataDirFile.delete()) {
// a file is holding on to a reference so we can't properly clean the data directory.
// this means we need a new one.
_dataDirectory = null;
setUpDataDirectory();
break;
}
}
report.updateSchemaManager |= relativePath.contains("schema_plugins");
}
}
}
final int prefixPathLength = srcDataDir.getPath().length();
for (File file : fileTreeTraverser.preOrderTraversal(srcDataDir)) {
String relativePath = file.getPath().substring(prefixPathLength);
final File dataDirFile = new File(_dataDirectory, relativePath);
if (file.isFile() && (!dataDirFile.exists() || dataDirFile.lastModified() != file.lastModified())) {
if (file.getParent().endsWith("schematron") && relativePath.contains("schema_plugins") && relativePath.endsWith(".xsl")) {
// don't copy because the schematron xsl files are generated.
// normally they shouldn't be here because they don't need to be in the
// repository but some tests can generate them into the schemtrons folder
// so ignore them here.
continue;
}
if (relativePath.endsWith("schemaplugin-uri-catalog.xml")) {
// we will handle this special case later.
continue;
}
if (!dataDirFile.getParentFile().exists()) {
Files.createParentDirs(dataDirFile);
}
BinaryFile.copy(file, dataDirFile);
dataDirFile.setLastModified(file.lastModified());
report.updateSchemaManager |= relativePath.contains("schema_plugins");
}
}
return report;
}
private void deploySchema(String srcDataDir, String schemaPluginPath) {
// Copy schema plugin
final String schemaModulePath = "schemas";
File schemaModuleDir = new File(srcDataDir + "/../../../../" + schemaModulePath);
if (schemaModuleDir.exists()) {
String[] listOfSchemaToLoad = {"iso19139", "dublin-core", "iso19115", "fgdc-std"};
for (String schema : listOfSchemaToLoad) {
String srcPath = schemaModuleDir + "/" + schema + "/src/main/plugin/" + schema;
String destPath = schemaPluginPath + "/" + schema;
try {
BinaryFile.copyDirectory(new File(srcPath), new File(destPath));
} catch (IOException e) {
e.printStackTrace();
}
}
}
>>>>>>>
public void setup() throws Exception {
testFixture.setup(this); |
<<<<<<<
import org.fao.geonet.util.PasswordUtil;
=======
import org.fao.geonet.util.MailUtil;
>>>>>>>
import org.fao.geonet.util.PasswordUtil;
import org.fao.geonet.util.MailUtil;
<<<<<<<
if (!sendRegistrationEmail(params, password, host, port, from, thisSite, siteURL)) {
return element.addContent(new Element("result").setText("errorEmailToAddressFailed"));
=======
if (!sendRegistrationEmail(params, password, catalogAdminEmail, thisSite, siteURL, sm)) {
dbms.abort();
return element.addContent(new Element("result").setText("errorEmailToAddressFailed"));
>>>>>>>
if (!sendRegistrationEmail(params, password, catalogAdminEmail, thisSite, siteURL, sm)) {
return element.addContent(new Element("result").setText("errorEmailToAddressFailed"));
<<<<<<<
if (!profile.equalsIgnoreCase(Profile.RegisteredUser.name()) && !sendProfileRequest(params, host, port, from, thisSite, siteURL)) {
=======
if (!profile.equalsIgnoreCase(Geonet.Profile.REGISTERED_USER) && !sendProfileRequest(params, catalogAdminEmail, thisSite, siteURL, sm)) {
>>>>>>>
if (!profile.equalsIgnoreCase(Profile.RegisteredUser.name()) && !sendProfileRequest(params, catalogAdminEmail, thisSite, siteURL, sm)) { |
<<<<<<<
* When a remote rating is applied, the local rating is not updated. It will be updated
* on the next harvest run (FIXME ?).
=======
* When a remote rating is applied, the local rating is not updated. It will be updated on the next
* harvest run (FIXME ?).
>>>>>>>
* When a remote rating is applied, the local rating is not updated. It will be updated on the next
* harvest run (FIXME ?).
<<<<<<<
// look up value of localrating/enable
SettingManager settingManager = gc.getBean(SettingManager.class);
boolean localRating = settingManager.getValueAsBool(SYSTEM_LOCALRATING_ENABLE, false);
if (localRating || harvUuid == null)
//--- metadata is local, just rate it
rating = dm.rateMetadata(Integer.valueOf(id), ip, rating);
else
{
//--- the metadata is harvested, is type=geonetwork?
=======
// look up value of localrating/enable
SettingManager settingManager = gc.getBean(SettingManager.class);
boolean localRating = settingManager.getValueAsBool("system/localrating/enable", false);
>>>>>>>
// look up value of localrating/enable
SettingManager settingManager = gc.getBean(SettingManager.class);
boolean localRating = settingManager.getValueAsBool(SYSTEM_LOCALRATING_ENABLE, false); |
<<<<<<<
import org.fao.geonet.NodeInfo;
import org.fao.geonet.kernel.search.index.IndexingList;
import org.fao.geonet.kernel.search.index.IndexingTask;
import org.fao.geonet.repository.specification.*;
import org.fao.geonet.repository.statistic.PathSpec;
import org.fao.geonet.util.FileCopyMgr;
import org.fao.geonet.utils.Log;
import org.fao.geonet.utils.Xml;
import org.fao.geonet.utils.Xml.ErrorHandler;
=======
>>>>>>>
import org.fao.geonet.NodeInfo; |
<<<<<<<
import org.fao.geonet.Logger;
=======
>>>>>>>
import org.fao.geonet.Logger;
<<<<<<<
protected void addCategories(String id, Iterable<String> categories, CategoryMapper localCateg, DataManager dataMan, ServiceContext context, Logger log, String serverCategory) throws Exception {
=======
public void addCategories(String id, Iterable<String> categories, CategoryMapper localCateg, DataManager dataMan, Dbms dbms, ServiceContext context, Logger log, String serverCategory) throws Exception {
>>>>>>>
public void addCategories(String id, Iterable<String> categories, CategoryMapper localCateg, DataManager dataMan, ServiceContext context, Logger log, String serverCategory) throws Exception {
<<<<<<<
protected void addPrivileges(String id, Iterable<Privileges> privilegesIterable, GroupMapper localGroups, DataManager dataMan, ServiceContext context, Logger log) throws Exception {
=======
public void addPrivileges(String id, Iterable<Privileges> privilegesIterable, GroupMapper localGroups, DataManager dataMan, ServiceContext context, Dbms dbms, Logger log) throws Exception {
>>>>>>>
public void addPrivileges(String id, Iterable<Privileges> privilegesIterable, GroupMapper localGroups, DataManager dataMan, ServiceContext context, Logger log) throws Exception { |
<<<<<<<
Format format, boolean skipUUID, boolean resolveXlink, boolean removeXlinkAttribute) throws Exception {
Pair<AbstractMetadata, String> recordAndMetadata =
MEFLib.retrieveMetadata(context, uuid, resolveXlink, removeXlinkAttribute);
AbstractMetadata record = recordAndMetadata.one();
=======
Format format, boolean skipUUID, boolean resolveXlink,
boolean removeXlinkAttribute, boolean addSchemaLocation) throws Exception {
Pair<Metadata, String> recordAndMetadata =
MEFLib.retrieveMetadata(context, uuid, resolveXlink, removeXlinkAttribute, addSchemaLocation);
Metadata record = recordAndMetadata.one();
>>>>>>>
Format format, boolean skipUUID, boolean resolveXlink,
boolean removeXlinkAttribute, boolean addSchemaLocation) throws Exception {
Pair<AbstractMetadata, String> recordAndMetadata =
MEFLib.retrieveMetadata(context, uuid, resolveXlink, removeXlinkAttribute, addSchemaLocation);
AbstractMetadata record = recordAndMetadata.one(); |
<<<<<<<
=======
import static org.quartz.JobKey.jobKey;
import java.io.File;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import jeeves.exceptions.BadInputEx;
import jeeves.exceptions.BadParameterEx;
import jeeves.exceptions.JeevesException;
import jeeves.exceptions.OperationAbortedEx;
import jeeves.guiservices.session.JeevesUser;
import jeeves.interfaces.Logger;
import jeeves.resources.dbms.Dbms;
>>>>>>>
<<<<<<<
import org.fao.geonet.Logger;
=======
import org.apache.commons.lang.time.StopWatch;
import org.apache.log4j.DailyRollingFileAppender;
import org.apache.log4j.PatternLayout;
>>>>>>>
import org.fao.geonet.Logger;
<<<<<<<
import org.fao.geonet.domain.*;
import org.fao.geonet.exceptions.BadInputEx;
import org.fao.geonet.exceptions.BadParameterEx;
import org.fao.geonet.exceptions.JeevesException;
import org.fao.geonet.exceptions.OperationAbortedEx;
=======
import org.fao.geonet.csw.common.exceptions.InvalidParameterValueEx;
>>>>>>>
import org.fao.geonet.csw.common.exceptions.InvalidParameterValueEx;
import org.fao.geonet.domain.*;
import org.fao.geonet.exceptions.BadInputEx;
import org.fao.geonet.exceptions.BadParameterEx;
import org.fao.geonet.exceptions.JeevesException;
import org.fao.geonet.exceptions.OperationAbortedEx;
<<<<<<<
import org.fao.geonet.utils.Log;
import org.fao.geonet.utils.QuartzSchedulerUtils;
import org.fao.geonet.utils.Xml;
=======
import org.fao.geonet.services.harvesting.notifier.SendNotification;
>>>>>>>
import org.fao.geonet.utils.Log;
import org.fao.geonet.utils.QuartzSchedulerUtils;
import org.fao.geonet.utils.Xml;
<<<<<<<
=======
/**
* Adds all AbstractHarvester instances
*
* @param context
* @throws Exception
*/
public static void staticInit(ServiceContext context) throws Exception {
register(context, GeonetHarvester .class);
register(context, Geonet20Harvester.class);
register(context, GeoPRESTHarvester.class);
register(context, WebDavHarvester .class);
register(context, CswHarvester .class);
register(context, Z3950Harvester .class);
register(context, Z3950ConfigHarvester .class);
register(context, OaiPmhHarvester .class);
register(context, OgcWxSHarvester .class);
register(context, ThreddsHarvester .class);
register(context, ArcSDEHarvester .class);
register(context, LocalFilesystemHarvester .class);
register(context, WfsFeaturesHarvester .class);
register(context, LocalFilesystemHarvester .class);
}
/**
* Register one instance of {@link AbstractHarvester} on the singleton.
*
* @param context
* @param harvester
* @throws Exception
*/
private static void register(ServiceContext context, Class<?> harvester) throws Exception {
try {
Method initMethod = harvester.getMethod("init", context.getClass());
initMethod.invoke(null, context);
AbstractHarvester<?> ah = (AbstractHarvester<?>) harvester.newInstance();
hsHarvesters.put(ah.getType(), harvester);
}
catch(Exception e) {
throw new Exception("Cannot register harvester : "+harvester, e);
}
}
>>>>>>>
<<<<<<<
public static AbstractHarvester create(String type, ServiceContext context) throws BadParameterEx, OperationAbortedEx {
=======
public static AbstractHarvester<?> create(String type, ServiceContext context, HarvesterSettingsManager sm, DataManager dm) throws BadParameterEx, OperationAbortedEx {
>>>>>>>
public static AbstractHarvester<?> create(String type, ServiceContext context) throws BadParameterEx, OperationAbortedEx {
<<<<<<<
AbstractHarvester ah = context.getApplicationContext().getBean(type, AbstractHarvester.class);
=======
AbstractHarvester<?> ah = (AbstractHarvester<?>) c.newInstance();
>>>>>>>
AbstractHarvester<?> ah = context.getApplicationContext().getBean(type, AbstractHarvester.class);
<<<<<<<
doHarvest(log);
=======
doHarvest_(log, rm);
>>>>>>>
doHarvest(log);
<<<<<<<
doHarvest(logger);
=======
doHarvest_(logger, rm);
>>>>>>>
doHarvest(logger);
<<<<<<<
@Transactional
void harvest() {
running = true;
long startTime = System.currentTimeMillis();
try {
=======
void harvest() {
running = true;
long startTime = System.currentTimeMillis();
String logfile = initializeLog();
this.log.info("Starting harvesting of " + this.getParams().name);
try {
>>>>>>>
@Transactional
void harvest() {
running = true;
long startTime = System.currentTimeMillis();
String logfile = initializeLog();
this.log.info("Starting harvesting of " + this.getParams().name);
try {
<<<<<<<
final Logger logger = Log.createLogger(Geonet.HARVESTER);
final String nodeName = getParams().name + " (" + getClass().getSimpleName() + ")";
final String lastRun = new DateTime().withZone(DateTimeZone.forID("UTC")).toString();
try {
login();
//--- update lastRun
settingMan.setValue("harvesting/id:" + id + "/info/lastRun", lastRun);
//--- proper harvesting
logger.info("Started harvesting from node : " + nodeName);
HarvestWithIndexProcessor h = new HarvestWithIndexProcessor(dataMan, logger);
// todo check (was: processwithfastindexing)
h.process();
logger.info("Ended harvesting from node : " + nodeName);
if (getParams().oneRunOnly) {
stop();
=======
errors.clear();
ResourceManager rm = new ResourceManager(context.getMonitorManager(), context.getProviderManager());
Logger logger = Log.createLogger(Geonet.HARVESTER);
String lastRun = new DateTime().withZone(DateTimeZone.forID("UTC")).toString();
String nodeName = getParams().name +" ("+ getClass().getSimpleName() +")";
try {
login();
Dbms dbms = (Dbms) rm.open(Geonet.Res.MAIN_DB);
//--- update lastRun
settingMan.setValue(dbms, "harvesting/id:"+ id +"/info/lastRun", lastRun);
//--- proper harvesting
logger.info("Started harvesting from node : "+ nodeName);
HarvestWithIndexProcessor h = new HarvestWithIndexProcessor(dataMan, logger, rm);
// todo check (was: processwithfastindexing)
h.process();
logger.info("Ended harvesting from node : "+ nodeName);
if (getParams().oneRunOnly){
stop(dbms);
>>>>>>>
errors.clear();
final Logger logger = Log.createLogger(Geonet.HARVESTER);
final String nodeName = getParams().name + " (" + getClass().getSimpleName() + ")";
final String lastRun = new DateTime().withZone(DateTimeZone.forID("UTC")).toString();
try {
login();
//--- update lastRun
settingMan.setValue("harvesting/id:" + id + "/info/lastRun", lastRun);
//--- proper harvesting
logger.info("Started harvesting from node : " + nodeName);
HarvestWithIndexProcessor h = new HarvestWithIndexProcessor(dataMan, logger);
// todo check (was: processwithfastindexing)
h.process();
logger.info("Ended harvesting from node : " + nodeName);
if (getParams().oneRunOnly) {
stop();
<<<<<<<
} catch (Throwable t) {
logger.warning("Raised exception while harvesting from : " + nodeName);
logger.warning(" (C) Class : " + t.getClass().getSimpleName());
logger.warning(" (C) Message : " + t.getMessage());
error = t;
t.printStackTrace();
}
long elapsedTime = (System.currentTimeMillis() - startTime) / 1000;
=======
rm.close();
} catch (InvalidParameterValueEx e) {
logger.error("The harvester " + this.getParams().name + "["
+ this.getType()
+ "] didn't accept some of the parameters sent.");
errors.add(new HarvestError(e, logger));
error = e;
try {
rm.abort();
} catch (Exception ex) {
logger.fatal("CANNOT ABORT EXCEPTION");
logger.fatal(" (C) Exc : " + ex.getMessage());
}
}
catch(Throwable t) {
logger.warning("Raised exception while harvesting from : "+ nodeName);
logger.warning(" (C) Class : "+ t.getClass().getSimpleName());
logger.warning(" (C) Message : "+ t.getMessage());
errors.add(new HarvestError(t, logger));
error = t;
t.printStackTrace();
try {
rm.abort();
}
catch (Exception ex) {
logger.warning("CANNOT ABORT EXCEPTION");
logger.warning(" (C) Exc : "+ ex);
}
} finally {
List<HarvestError> harvesterErrors = getErrors();
if (harvesterErrors != null) {
errors.addAll(harvesterErrors);
}
}
long elapsedTime = (System.currentTimeMillis() - startTime) / 1000;
>>>>>>>
} catch (InvalidParameterValueEx e) {
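// The harvester rejected some of the supplied parameters; record the error (no stack trace, unlike the generic Throwable case below).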
logger.error("The harvester " + this.getParams().name + "["
+ this.getType()
+ "] didn't accept some of the parameters sent.");
errors.add(new HarvestError(e, logger));
error = e;
} catch (Throwable t) {
logger.warning("Raised exception while harvesting from : " + nodeName);
logger.warning(" (C) Class : " + t.getClass().getSimpleName());
logger.warning(" (C) Message : " + t.getMessage());
error = t;
t.printStackTrace();
errors.add(new HarvestError(t, logger));
} finally {
List<HarvestError> harvesterErrors = getErrors();
if (harvesterErrors != null) {
errors.addAll(harvesterErrors);
}
}
long elapsedTime = (System.currentTimeMillis() - startTime) / 1000;
<<<<<<<
public abstract void doHarvest(Logger l) throws Exception;
=======
protected abstract void doHarvest(Logger l, ResourceManager rm) throws Exception;
/**
* Outer function to get the execution time and common code.
* @param log
* @param rm
* @throws Exception
*/
protected void doHarvest_(Logger log, ResourceManager rm) throws Exception
{
doHarvest(log, rm);
}
>>>>>>>
public abstract void doHarvest(Logger l) throws Exception;
/**
* Outer function to get the execution time and common code.
* @param log
* @throws Exception
*/
protected void doHarvest_(Logger log) throws Exception {
doHarvest(log);
}
<<<<<<<
=======
private static Map<String, Class<?>> hsHarvesters = new HashMap<String, Class<?>>();
protected IHarvester<T> h = null;
>>>>>>>
protected IHarvester<T> h = null; |
<<<<<<<
=======
import com.vividsolutions.jts.util.Assert;
import org.apache.lucene.document.Document;
import org.fao.geonet.entitylistener.MetadataEntityListenerManager;
import org.fao.geonet.utils.Xml;
import org.hibernate.annotations.Type;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import java.io.IOException;
import java.io.Serializable;
>>>>>>>
import java.io.Serializable;
<<<<<<<
public class Metadata extends AbstractMetadata {
=======
@SequenceGenerator(name = Metadata.ID_SEQ_NAME, initialValue = 100, allocationSize = 1)
public class Metadata extends GeonetEntity implements Serializable {
private static final long serialVersionUID = -5557599895424227101L;
>>>>>>>
@SequenceGenerator(name = Metadata.ID_SEQ_NAME, initialValue = 100, allocationSize = 1)
public class Metadata extends AbstractMetadata implements Serializable {
private static final long serialVersionUID = -5557599895424227101L; |
<<<<<<<
this.metadataUtils = context.getBean(IMetadataUtils.class);
this.settingMan = context.getBean(HarvesterSettingsManager.class);
=======
this.harvesterSettingsManager = context.getBean(HarvesterSettingsManager.class);
this.settingManager = context.getBean(SettingManager.class);
>>>>>>>
this.metadataUtils = context.getBean(IMetadataUtils.class);
this.harvesterSettingsManager = context.getBean(HarvesterSettingsManager.class);
this.settingManager = context.getBean(SettingManager.class);
<<<<<<<
final Specifications<? extends AbstractMetadata> ownedByHarvester = Specifications.where(MetadataSpecs.hasHarvesterUuid(getParams().getUuid()));
=======
final Specifications<Metadata> ownedByHarvester = Specifications.where(MetadataSpecs.hasHarvesterUuid(getParams().getUuid()));
>>>>>>>
final Specifications<? extends AbstractMetadata> ownedByHarvester = Specifications.where(MetadataSpecs.hasHarvesterUuid(getParams().getUuid())); |
<<<<<<<
MetadataType.METADATA);
=======
MetadataType.METADATA, metadata.getDataInfo().getTitle(), false);
>>>>>>>
MetadataType.METADATA, false); |
<<<<<<<
final ServiceContext context = createServiceContext(locale.getISO3Language(),
formatType, request.getNativeRequest(HttpServletRequest.class));
AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, servletRequest);
=======
final String language = LanguageUtils.locale2gnCode(locale.getISO3Language());
final ServiceContext context = createServiceContext(
language,
formatType,
request.getNativeRequest(HttpServletRequest.class));
Metadata metadata = ApiUtils.canViewRecord(metadataUuid, servletRequest);
>>>>>>>
final String language = LanguageUtils.locale2gnCode(locale.getISO3Language());
final ServiceContext context = createServiceContext(
language,
formatType,
request.getNativeRequest(HttpServletRequest.class));
AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, servletRequest);
<<<<<<<
AbstractMetadata md = loadMetadata(context.getBean(IMetadataUtils.class), id);
Element metadata = context.getBean(XmlSerializer.class).removeHiddenElements(false, md, false);
=======
XmlSerializer serializer = context.getBean(XmlSerializer.class);
boolean doXLinks = serializer.resolveXLinks();
Metadata md = loadMetadata(context.getBean(MetadataRepository.class), id);
>>>>>>>
AbstractMetadata md = loadMetadata(context.getBean(IMetadataUtils.class), id);
XmlSerializer serializer = context.getBean(XmlSerializer.class);
boolean doXLinks = serializer.resolveXLinks(); |
<<<<<<<
import org.fao.geonet.domain.AbstractMetadata;
=======
import org.fao.geonet.domain.Metadata;
import org.fao.geonet.domain.userfeedback.RatingsSetting;
>>>>>>>
import org.fao.geonet.domain.AbstractMetadata;
import org.fao.geonet.domain.userfeedback.RatingsSetting; |
<<<<<<<
@EntityListeners(MetadataEntityListenerManager.class)
=======
@SequenceGenerator(name=Metadata.ID_SEQ_NAME, initialValue=100, allocationSize=1)
>>>>>>>
@EntityListeners(MetadataEntityListenerManager.class)
@SequenceGenerator(name=Metadata.ID_SEQ_NAME, initialValue=100, allocationSize=1)
<<<<<<<
=======
static final String ID_SEQ_NAME = "metadata_id_seq";
>>>>>>>
static final String ID_SEQ_NAME = "metadata_id_seq"; |
<<<<<<<
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.utils.BinaryFile;
=======
import org.fao.geonet.constants.Geonet;
>>>>>>>
import org.fao.geonet.constants.Geonet;
<<<<<<<
import java.io.File;
import java.io.IOException;
=======
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
>>>>>>>
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
<<<<<<<
private String webappDir;
private String systemDataDir;
private File luceneDir;
private File spatialIndexPath;
private File configDir;
private File thesauriDir;
private File schemaPluginsDir;
private File metadataDataDir;
private File metadataRevisionDir;
private File resourcesDir;
private File htmlCacheDir;
private File formatterDir;
=======
private Path webappDir;
private Path systemDataDir;
private Path luceneDir;
private Path spatialIndexPath;
private Path configDir;
private Path thesauriDir;
private Path schemaPluginsDir;
private Path metadataDataDir;
private Path metadataRevisionDir;
private Path resourcesDir;
private Path htmlCacheDir;
private Path uploadDir;
>>>>>>>
private Path webappDir;
private Path systemDataDir;
private Path luceneDir;
private Path spatialIndexPath;
private Path configDir;
private Path thesauriDir;
private Path schemaPluginsDir;
private Path metadataDataDir;
private Path metadataRevisionDir;
private Path resourcesDir;
private Path htmlCacheDir;
private Path uploadDir;
private Path formatterDir;
<<<<<<<
Log.info(Geonet.DATA_DIRECTORY, " - Data directory is: "
+ systemDataDir);
// Set subfolder data directory
luceneDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir, ".lucene" + KEY_SUFFIX,
"index", Geonet.Config.LUCENE_DIR);
spatialIndexPath = setDir(jeevesServlet, "", handlerConfig, systemDataDir, "spatial" + KEY_SUFFIX,
"spatialindex", null);
configDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir, ".config" + KEY_SUFFIX,
"config", Geonet.Config.CONFIG_DIR);
thesauriDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir,
".codeList" + KEY_SUFFIX, "config" + File.separator + "codelist",
Geonet.Config.CODELIST_DIR);
schemaPluginsDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir, ".schema" + KEY_SUFFIX,
"config" + File.separator + "schema_plugins",
Geonet.Config.SCHEMAPLUGINS_DIR);
metadataDataDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir, ".data" + KEY_SUFFIX,
"data" + File.separator + "metadata_data",
Geonet.Config.DATA_DIR);
metadataRevisionDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir, ".svn" + KEY_SUFFIX,
"data" + File.separator + "metadata_subversion",
Geonet.Config.SUBVERSION_PATH);
formatterDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir, ".formatter" + KEY_SUFFIX,
"data" + File.separator + "formatter",
Geonet.Config.SUBVERSION_PATH);
resourcesDir = setDir(jeevesServlet, webappName, handlerConfig, systemDataDir,
".resources" + KEY_SUFFIX, "data" + File.separator + "resources",
Geonet.Config.RESOURCES_DIR);
htmlCacheDir = new File(handlerConfig.getValue(Geonet.Config.RESOURCES_DIR), "htmlcache");
handlerConfig.setValue(Geonet.Config.HTMLCACHE_DIR, htmlCacheDir.getAbsolutePath());
=======
Log.info(Geonet.DATA_DIRECTORY, " - Data directory is: "
+ systemDataDir);
// Set subfolder data directory
luceneDir = setDir(jeevesServlet, webappName, handlerConfig, ".lucene" + KEY_SUFFIX,
Geonet.Config.LUCENE_DIR, "index");
spatialIndexPath = setDir(jeevesServlet, "", handlerConfig, "spatial" + KEY_SUFFIX,
null, "spatialindex");
configDir = setDir(jeevesServlet, webappName, handlerConfig, ".config" + KEY_SUFFIX,
Geonet.Config.CONFIG_DIR, "config");
thesauriDir = setDir(jeevesServlet, webappName, handlerConfig,
".codeList" + KEY_SUFFIX, Geonet.Config.CODELIST_DIR, "config", "codelist"
);
schemaPluginsDir = setDir(jeevesServlet, webappName, handlerConfig, ".schema" + KEY_SUFFIX,
Geonet.Config.SCHEMAPLUGINS_DIR, "config", "schema_plugins"
);
metadataDataDir = setDir(jeevesServlet, webappName, handlerConfig, ".data" + KEY_SUFFIX,
Geonet.Config.DATA_DIR, "data", "metadata_data"
);
metadataRevisionDir = setDir(jeevesServlet, webappName, handlerConfig, ".svn" + KEY_SUFFIX,
Geonet.Config.SUBVERSION_PATH, "data", "metadata_subversion"
);
resourcesDir = setDir(jeevesServlet, webappName, handlerConfig,
".resources" + KEY_SUFFIX, Geonet.Config.RESOURCES_DIR, "data", "resources"
);
uploadDir = setDir(jeevesServlet, webappName, handlerConfig,
".upload" + KEY_SUFFIX, Geonet.Config.UPLOAD_DIR, "data", "upload"
);
htmlCacheDir = IO.toPath(handlerConfig.getValue(Geonet.Config.RESOURCES_DIR), "htmlcache");
handlerConfig.setValue(Geonet.Config.HTMLCACHE_DIR, htmlCacheDir.toAbsolutePath().toString());
>>>>>>>
Log.info(Geonet.DATA_DIRECTORY, " - Data directory is: "
+ systemDataDir);
// Set subfolder data directory
luceneDir = setDir(jeevesServlet, webappName, handlerConfig, ".lucene" + KEY_SUFFIX,
Geonet.Config.LUCENE_DIR, "index");
spatialIndexPath = setDir(jeevesServlet, "", handlerConfig, "spatial" + KEY_SUFFIX,
null, "spatialindex");
configDir = setDir(jeevesServlet, webappName, handlerConfig, ".config" + KEY_SUFFIX,
Geonet.Config.CONFIG_DIR, "config");
thesauriDir = setDir(jeevesServlet, webappName, handlerConfig,
".codeList" + KEY_SUFFIX, Geonet.Config.CODELIST_DIR, "config", "codelist"
);
schemaPluginsDir = setDir(jeevesServlet, webappName, handlerConfig, ".schema" + KEY_SUFFIX,
Geonet.Config.SCHEMAPLUGINS_DIR, "config", "schema_plugins"
);
metadataDataDir = setDir(jeevesServlet, webappName, handlerConfig, ".data" + KEY_SUFFIX,
Geonet.Config.DATA_DIR, "data", "metadata_data"
);
metadataRevisionDir = setDir(jeevesServlet, webappName, handlerConfig, ".svn" + KEY_SUFFIX,
Geonet.Config.SUBVERSION_PATH, "data", "metadata_subversion"
);
resourcesDir = setDir(jeevesServlet, webappName, handlerConfig,
".resources" + KEY_SUFFIX, Geonet.Config.RESOURCES_DIR, "data", "resources"
);
uploadDir = setDir(jeevesServlet, webappName, handlerConfig,
".upload" + KEY_SUFFIX, Geonet.Config.UPLOAD_DIR, "data", "upload"
);
formatterDir = setDir(jeevesServlet, webappName, handlerConfig,
".formatter" + KEY_SUFFIX, Geonet.Config.FORMATTER_PATH, "data", "formatter");
htmlCacheDir = IO.toPath(handlerConfig.getValue(Geonet.Config.RESOURCES_DIR), "htmlcache");
handlerConfig.setValue(Geonet.Config.HTMLCACHE_DIR, htmlCacheDir.toAbsolutePath().toString());
<<<<<<<
public File getFormatterDir() {
return formatterDir;
}
public void setFormatterDir(File formatterDir) {
this.formatterDir = formatterDir;
}
=======
public Path resolveWebResource(String resourcePath) {
if (resourcePath.charAt(0) == '/' || resourcePath.charAt(0) == '\\') {
resourcePath = resourcePath.substring(1);
}
return this.webappDir.resolve(resourcePath);
}
>>>>>>>
public Path getFormatterDir() {
return formatterDir;
}
public void setFormatterDir(Path formatterDir) {
this.formatterDir = formatterDir;
}
public Path resolveWebResource(String resourcePath) {
if (resourcePath.charAt(0) == '/' || resourcePath.charAt(0) == '\\') {
resourcePath = resourcePath.substring(1);
}
return this.webappDir.resolve(resourcePath);
} |
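// Resolution note: the merged class keeps the java.nio.file.Path-based directory
// API and carries formatterDir over from the File-based branch, registering it
// under Geonet.Config.FORMATTER_PATH instead of the SUBVERSION_PATH key that
// branch reused for the formatter directory (which looks like a copy-paste slip).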
<<<<<<<
//---------------------------------------------------------------------------
//---
//--- API methods
//---
//---------------------------------------------------------------------------
public String getSiteName()
{
=======
//---------------------------------------------------------------------------
//---
//--- API methods
//---
//---------------------------------------------------------------------------
public String getSiteName() {
>>>>>>>
public String getSiteName() {
<<<<<<<
String protocol;
Integer port;
String host = settingManager.getValue(Settings.SYSTEM_SERVER_HOST);
Integer secureport = toIntOrNull(Settings.SYSTEM_SERVER_SECURE_PORT);
Integer insecureport = toIntOrNull(Settings.SYSTEM_SERVER_PORT);
if (secureUrl) {
=======
String protocol;
Integer port;
String host = settingManager.getValue(Geonet.Settings.SERVER_HOST);
Integer secureport = toIntOrNull(Geonet.Settings.SERVER_SECURE_PORT);
Integer insecureport = toIntOrNull(Geonet.Settings.SERVER_PORT);
if (secureUrl) {
>>>>>>>
String protocol;
Integer port;
String host = settingManager.getValue(Settings.SYSTEM_SERVER_HOST);
Integer secureport = toIntOrNull(Settings.SYSTEM_SERVER_SECURE_PORT);
Integer insecureport = toIntOrNull(Settings.SYSTEM_SERVER_PORT);
if (secureUrl) {
<<<<<<<
}
}
=======
}
}
>>>>>>>
}
}
<<<<<<<
String value = settingManager.getValue(Settings.SYSTEM_SELECTIONMANAGER_MAXRECORDS);
if (value == null) value = "10000";
return value;
}
=======
String value = settingManager.getValue("system/selectionmanager/maxrecords");
if (value == null) value = "10000";
return value;
}
>>>>>>>
String value = settingManager.getValue(Settings.SYSTEM_SELECTIONMANAGER_MAXRECORDS);
if (value == null) value = "10000";
return value;
}
<<<<<<<
String value = settingManager.getValue(Settings.SYSTEM_AUTODETECT_ENABLE);
if(value == null) {
=======
String value = settingManager.getValue("system/autodetect/enable");
if (value == null) {
>>>>>>>
String value = settingManager.getValue(Settings.SYSTEM_AUTODETECT_ENABLE);
if (value == null) {
<<<<<<<
String value = settingManager.getValue(Settings.SYSTEM_REQUESTEDLANGUAGE_SORTED);
if(value == null) {
=======
String value = settingManager.getValue("system/requestedLanguage/sorted");
if (value == null) {
>>>>>>>
String value = settingManager.getValue(Settings.SYSTEM_REQUESTEDLANGUAGE_SORTED);
if (value == null) {
<<<<<<<
String value = settingManager.getValue(Settings.SYSTEM_INDEXOPTIMIZER_ENABLE);
if (value == null) return false;
else return value.equals("true");
}
//---------------------------------------------------------------------------
=======
String value = settingManager.getValue("system/indexoptimizer/enable");
if (value == null) return false;
else return value.equals("true");
}
>>>>>>>
String value = settingManager.getValue(Settings.SYSTEM_INDEXOPTIMIZER_ENABLE);
if (value == null) return false;
else return value.equals("true");
}
<<<<<<<
String value = settingManager.getValue(Settings.SYSTEM_XLINKRESOLVER_ENABLE);
if (value == null) return false;
else return value.equals("true");
}
=======
String value = settingManager.getValue("system/xlinkResolver/enable");
if (value == null) return false;
else return value.equals("true");
}
>>>>>>>
String value = settingManager.getValue(Settings.SYSTEM_XLINKRESOLVER_ENABLE);
if (value == null) return false;
else return value.equals("true");
}
<<<<<<<
String value = settingManager.getValue(Settings.SYSTEM_SEARCHSTATS);
if (value == null) return false;
else return value.equals("true");
}
=======
String value = settingManager.getValue("system/searchStats/enable");
if (value == null) return false;
else return value.equals("true");
}
>>>>>>>
String value = settingManager.getValue(Settings.SYSTEM_SEARCHSTATS);
if (value == null) return false;
else return value.equals("true");
}
<<<<<<<
String ignoreChars = settingManager.getValue(Settings.SYSTEM_LUCENE_IGNORECHARS);
if(ignoreChars == null || ignoreChars.length() == 0) {
=======
String ignoreChars = settingManager.getValue(SettingManager.SYSTEM_LUCENE_IGNORECHARS);
if (ignoreChars == null || ignoreChars.length() == 0) {
>>>>>>>
String ignoreChars = settingManager.getValue(Settings.SYSTEM_LUCENE_IGNORECHARS);
if (ignoreChars == null || ignoreChars.length() == 0) { |
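// Resolution note: every hunk in this record resolves the same way — the named
// Settings.* constants replace the raw "system/..." setting keys, while the
// surrounding brace and spacing style (e.g. "if (value == null)") is kept.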
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, "@class")
=======
ObjectMapper om = JsonMapper.builder()
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL, "@class")
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, "@class") |
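// Resolution note: this keeps the newer Jackson builder API — since Jackson 2.10,
// enableDefaultTypingAsProperty() is superseded by activateDefaultTypingAsProperty(),
// which takes a PolymorphicTypeValidator — combined with the left branch's shorter
// imported DefaultTyping reference.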
<<<<<<<
=======
import jeeves.utils.Xml;
>>>>>>>
<<<<<<<
import java.util.*;
=======
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
>>>>>>>
import java.util.*;
<<<<<<<
if(log.isDebugEnabled()) log.debug(" - Removing old metadata before update with id: " + id);
dataMan.deleteMetadataGroup(context, id);
=======
if(this.log.isDebugEnabled()) log.debug(" - Removing old metadata before update with id: " + id);
dataMan.deleteMetadataGroup(context, dbms, id);
>>>>>>>
        if (this.log.isDebugEnabled()) log.debug(" - Removing old metadata before update with id: " + id);
dataMan.deleteMetadataGroup(context, id);
<<<<<<<
private final Logger log;
=======
private Logger log;
private final Dbms dbms;
>>>>>>>
private final Logger log; |
<<<<<<<
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
=======
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import jeeves.server.context.ServiceContext;
>>>>>>>
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import jeeves.server.context.ServiceContext;
<<<<<<<
import org.fao.geonet.services.thumbnail.Set;
=======
import org.fao.geonet.repository.MetadataRepository;
>>>>>>>
import org.fao.geonet.services.thumbnail.Set;
import org.fao.geonet.repository.MetadataRepository; |
<<<<<<<
final List<Object> nodeList = trySelectNode(metadataRecord, metadataSchema, xpathProperty, true).results;
=======
// Removes root metadata element for xpath filters
xpathProperty = cleanRootFromXPath(xpathProperty, metadataRecord);
final Object propNode = trySelectNode(metadataRecord, metadataSchema, xpathProperty).result;
>>>>>>>
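        // Removes root metadata element for xpath filters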
xpathProperty = cleanRootFromXPath(xpathProperty, metadataRecord);
final List<Object> nodeList = trySelectNode(metadataRecord, metadataSchema, xpathProperty, true).results; |
<<<<<<<
public static SystemInfo createForTesting(String stagingProfile) {
return new SystemInfo(stagingProfile, "testing", "3.0.0", "SNAPSHOT", "testing", "testing", "testing", "", "");
}
=======
>>>>>>>
public static SystemInfo createForTesting(String stagingProfile) {
return new SystemInfo(stagingProfile, "testing", "3.0.0", "SNAPSHOT", "testing", "testing", "testing", "", "");
}
<<<<<<<
new Element("stagingProfile").setText(this.stagingProfile),
new Element("buildOsInfo").setText(this.buildOsInfo),
new Element("buildJavaVendor").setText(this.buildJavaVendor),
new Element("buildJavaVersion").setText(this.buildJavaVersion),
new Element("buildDate").setText(this.buildDate),
new Element("scmRevision").setText(this.scmRevision)
=======
new Element("stagingProfile").setText(this.stagingProfile),
new Element("buildOsInfo").setText(this.buildOsInfo),
new Element("buildJavaVendor").setText(this.buildJavaVendor),
new Element("buildJavaVersion").setText(this.buildJavaVersion),
new Element("buildDate").setText(this.buildDate)
>>>>>>>
new Element("stagingProfile").setText(this.stagingProfile),
new Element("buildOsInfo").setText(this.buildOsInfo),
new Element("buildJavaVendor").setText(this.buildJavaVendor),
new Element("buildJavaVersion").setText(this.buildJavaVersion),
new Element("buildDate").setText(this.buildDate),
new Element("scmRevision").setText(this.scmRevision)
<<<<<<<
public static SystemInfo getInfo() {
return getInfo(null);
}
public static SystemInfo getInfo(SystemInfo defaultInfo) {
SystemInfo actualInfo = defaultInfo;
if (actualInfo == null && ApplicationContextHolder.get() != null) {
actualInfo = ApplicationContextHolder.get().getBean(SystemInfo.class);
}
return actualInfo;
}
public String getScmRevision() {
return scmRevision;
}
public void setScmRevision(String scmRevision) {
this.scmRevision = scmRevision;
}
public String getScmUrl() {
return scmUrl;
}
public void setScmUrl(String scmUrl) {
this.scmUrl = scmUrl;
}
=======
>>>>>>>
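    // Note: the left branch's no-arg getInfo() convenience overload does not
    // survive this resolution; any caller of SystemInfo.getInfo() would need the
    // one-arg form. Whether that is intentional is not clear from this hunk alone.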
public static SystemInfo getInfo(SystemInfo defaultInfo) {
SystemInfo actualInfo = defaultInfo;
if (actualInfo == null && ApplicationContextHolder.get() != null) {
actualInfo = ApplicationContextHolder.get().getBean(SystemInfo.class);
}
return actualInfo;
}
public String getScmRevision() {
return scmRevision;
}
public void setScmRevision(String scmRevision) {
this.scmRevision = scmRevision;
}
public String getScmUrl() {
return scmUrl;
}
public void setScmUrl(String scmUrl) {
this.scmUrl = scmUrl;
} |
<<<<<<<
JsonPOJOBuilder.Value builderConfig = (ai == null) ? null : ai.findPOJOBuilderConfig(config, builderClass);
String mutatorPrefix = (builderConfig == null) ? JsonPOJOBuilder.DEFAULT_WITH_PREFIX : builderConfig.withPrefix;
=======
JsonPOJOBuilder.Value builderConfig = (ai == null) ? null : ai.findPOJOBuilderConfig(builderClass);
String mutatorPrefix = (builderConfig == null) ? _withPrefix : builderConfig.withPrefix;
>>>>>>>
JsonPOJOBuilder.Value builderConfig = (ai == null) ? null : ai.findPOJOBuilderConfig(config, builderClass);
String mutatorPrefix = (builderConfig == null) ? _withPrefix : builderConfig.withPrefix; |
<<<<<<<
Object filterId = findFilterId(config, beanDesc);
MapSerializer mapSer = MapSerializer.construct(config.getAnnotationIntrospector().findPropertiesToIgnore(beanDesc.getClassInfo()),
=======
*/
Object filterId = findFilterId(config, beanDesc);
ser = MapSerializer.construct(config.getAnnotationIntrospector().findPropertiesToIgnore(beanDesc.getClassInfo()),
>>>>>>>
*/
Object filterId = findFilterId(config, beanDesc);
MapSerializer mapSer = MapSerializer.construct(config.getAnnotationIntrospector().findPropertiesToIgnore(beanDesc.getClassInfo()),
<<<<<<<
Object suppressableValue = findSuppressableContentValue(config,
type.getContentType(), beanDesc);
if (suppressableValue != null) {
mapSer = mapSer.withContentInclusion(suppressableValue);
}
ser = mapSer;
}
=======
>>>>>>>
Object suppressableValue = findSuppressableContentValue(config,
type.getContentType(), beanDesc);
if (suppressableValue != null) {
mapSer = mapSer.withContentInclusion(suppressableValue);
}
ser = mapSer; |
<<<<<<<
=======
// 24-May-2012, tatu: Comment out for 2.0.x to keep tests green; leave for 2.1 to fix
/*
// [JACKSON-822]: ensure that type can be coerced
public void testTypedLists() throws Exception
{
ObjectMapper mapper = new ObjectMapper();
List<Issue822Interface> list = new ArrayList<Issue822Interface>();
String singleJson = mapper.writerWithType(Issue822Interface.class).writeValueAsString(new Issue822Impl());
// start with specific value case:
assertEquals("{\"a\":3}", singleJson);
// then lists
list.add(new Issue822Impl());
String listJson = mapper.writerWithType(new TypeReference<List<Issue822Interface>>(){})
.writeValueAsString(list);
assertEquals("[{\"a\":3}]", listJson);
}
// [JACKSON-822]: ensure that type can be coerced
public void testTypedArrays() throws Exception
{
ObjectMapper mapper = new ObjectMapper();
assertEquals("[{\"a\":3}]", mapper.writerWithType(Issue822Interface[].class).writeValueAsString(
new Issue822Interface[] { new Issue822Impl() }));
}
*/
>>>>>>>
// [JACKSON-822]: ensure that type can be coerced
public void testTypedLists() throws Exception
{
ObjectMapper mapper = new ObjectMapper();
List<Issue822Interface> list = new ArrayList<Issue822Interface>();
String singleJson = mapper.writerWithType(Issue822Interface.class).writeValueAsString(new Issue822Impl());
// start with specific value case:
assertEquals("{\"a\":3}", singleJson);
// then lists
list.add(new Issue822Impl());
String listJson = mapper.writerWithType(new TypeReference<List<Issue822Interface>>(){})
.writeValueAsString(list);
assertEquals("[{\"a\":3}]", listJson);
}
// [JACKSON-822]: ensure that type can be coerced
public void testTypedArrays() throws Exception
{
ObjectMapper mapper = new ObjectMapper();
assertEquals("[{\"a\":3}]", mapper.writerWithType(Issue822Interface[].class).writeValueAsString(
new Issue822Interface[] { new Issue822Impl() }));
} |
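// Resolution note: re-enables the JACKSON-822 list/array type-coercion tests that
// the right branch had commented out "for 2.0.x to keep tests green", on the
// assumption that the underlying fix has since landed.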
<<<<<<<
=======
import java.util.ArrayList;
import java.util.Collection;
>>>>>>>
import java.util.ArrayList;
import java.util.Collection;
<<<<<<<
import org.apache.zeppelin.user.AuthenticationInfo;
=======
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.search.SearchService;
>>>>>>>
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.search.SearchService;
<<<<<<<
JobListenerFactory, AngularObjectRegistryListener,
RemoteInterpreterProcessListener {
=======
JobListenerFactory, AngularObjectRegistryListener, SearchService {
>>>>>>>
                          SearchService,
                          JobListenerFactory, AngularObjectRegistryListener, RemoteInterpreterProcessListener {
<<<<<<<
private void sendAllConfigurations(Session conn,
Notebook notebook) throws IOException {
ZeppelinConfiguration conf = notebook.getConf();
Map<String, String> configurations = conf.dumpConfigurations(conf,
new ZeppelinConfiguration.ConfigurationKeyPredicate() {
@Override
public boolean apply(String key) {
return !key.contains("password") && !key.equals(
ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING.
getVarName());
}
});
conn.getBasicRemote().sendText(serializeMessage(new Message(
OP.CONFIGURATIONS_INFO)
.put("configurations", configurations)));
}
private void checkpointNotebook(Session conn, Notebook notebook,
Message fromMessage) throws IOException {
String noteId = (String) fromMessage.get("noteId");
String commitMessage = (String) fromMessage.get("commitMessage");
notebook.checkpointNote(noteId, commitMessage);
}
=======
@Override
public List<Map<String, String>> query(String string) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void updateIndexDoc(Note note) throws IOException {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDocs(Collection<Note> clctn) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDoc(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDocs(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDoc(Note note, Paragraph prgrph) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void close() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
>>>>>>>
private void sendAllConfigurations(Session conn,
Notebook notebook) throws IOException {
ZeppelinConfiguration conf = notebook.getConf();
Map<String, String> configurations = conf.dumpConfigurations(conf,
new ZeppelinConfiguration.ConfigurationKeyPredicate() {
@Override
public boolean apply(String key) {
return !key.contains("password") && !key.equals(
ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING.
getVarName());
}
});
conn.getBasicRemote().sendText(serializeMessage(new Message(
OP.CONFIGURATIONS_INFO)
.put("configurations", configurations)));
}
private void checkpointNotebook(Session conn, Notebook notebook,
Message fromMessage) throws IOException {
String noteId = (String) fromMessage.get("noteId");
String commitMessage = (String) fromMessage.get("commitMessage");
notebook.checkpointNote(noteId, commitMessage);
}
@Override
public List<Map<String, String>> query(String string) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void updateIndexDoc(Note note) throws IOException {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDocs(Collection<Note> clctn) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDoc(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDocs(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDoc(Note note, Paragraph prgrph) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void close() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
} |
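// Resolution note: NotebookServer now implements SearchService alongside the
// listener interfaces, keeping the websocket configuration/checkpoint handlers
// from one branch and the SearchService overrides from the other. The overrides
// all throw UnsupportedOperationException, so they are placeholders, not a
// working search implementation.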
<<<<<<<
import se.kth.meta.entity.Template;
import se.kth.meta.exception.DatabaseException;
=======
import se.kth.hopsworks.controller.FolderNameValidator;
>>>>>>>
<<<<<<<
=======
private boolean createDataset(String dsPath, Inode parent, String dsName,
int template) throws AppException {
boolean success = false;
try {
success = fileOps.mkDir(dsPath);
//the inode has been created in the file system
if (success && template != 0) {
//get the newly created inode and the template it comes with
Inode neww = inodes.findByParentAndName(parent, dsName);
Template templ = this.template.findByTemplateId(template);
if (templ != null) {
templ.getInodes().add(neww);
//persist the relationship table
this.template.updateTemplatesInodesMxN(templ);
}
}
} catch (IOException ex) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
"Could not create the directory at " + dsPath);
} catch (DatabaseException e) {
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"Could not attach template to inode " + e.getMessage());
}
return success;
}
//this should be in its own class
private void logActivity(String activityPerformed, String flag,
User performedBy, Project performedOn) {
Date now = new Date();
Activity activity = new Activity();
activity.setActivity(activityPerformed);
activity.setFlag(flag);
activity.setProject(performedOn);
activity.setTimestamp(now);
activity.setUser(performedBy);
activityFacade.persistActivity(activity);
}
>>>>>>> |
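// Resolution note: both hunks in this record resolve to deletion — the imports,
// the inline createDataset() helper, and the logActivity() helper are all dropped,
// consistent with the helper's own "this should be in its own class" remark
// suggesting the logic moved elsewhere.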
<<<<<<<
@NotNull
@Size(min = 1, max = 30)
@Column(name = "ethical_satus")
private String ethicalStatus;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "trackStudy")
private Collection<StudyGroups> studyGroupsCollection;
=======
>>>>>>>
@NotNull
@Size(min = 1, max = 30)
@Column(name = "ethical_satus")
private String ethicalStatus;
<<<<<<<
public void setRetentionPeriod(Date retentionPeriod) {
this.retentionPeriod = retentionPeriod;
}
@XmlTransient
@JsonIgnore
public Collection<StudyGroups> getStudyGroupsCollection() {
return studyGroupsCollection;
}
public void setStudyGroupsCollection(Collection<StudyGroups> studyGroupsCollection) {
this.studyGroupsCollection = studyGroupsCollection;
}
=======
>>>>>>>
public void setRetentionPeriod(Date retentionPeriod) {
this.retentionPeriod = retentionPeriod;
} |
<<<<<<<
private String restEndpoint;
=======
private String jobName;
private ElasticProperties elastic;
>>>>>>>
private String restEndpoint;
private String jobName;
private ElasticProperties elastic;
<<<<<<<
Integer projectId, String projectName, String restEndPoint) {
=======
Integer projectId, String projectName, String jobName) {
>>>>>>>
Integer projectId, String projectName, String restEndPoint, String jobName) {
<<<<<<<
this.restEndpoint = restEndPoint;
=======
this.jobName = jobName;
>>>>>>>
this.restEndpoint = restEndPoint;
this.jobName = jobName;
<<<<<<<
public String getRestEndpoint() {
return restEndpoint;
}
public void setRestEndpoint(String restEndPoint) {
this.restEndpoint = restEndPoint;
}
=======
public String getJobName() {
return jobName;
}
public void setJobName(String jobName) {
this.jobName = jobName;
}
>>>>>>>
public String getRestEndpoint() {
return restEndpoint;
}
public void setRestEndpoint(String restEndPoint) {
this.restEndpoint = restEndPoint;
}
public String getJobName() {
return jobName;
}
public void setJobName(String jobName) {
this.jobName = jobName;
} |
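// Resolution note: a straightforward union merge — the class keeps the
// restEndpoint field and accessors from one branch plus the jobName and
// ElasticProperties additions from the other, and the constructor now takes
// both restEndPoint and jobName.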
<<<<<<<
// spark 1.5.x replaced --num-executors with --properties-file
// amargs.append(" --num-executors ").append(numberOfExecutors);
=======
// https://fossies.org/diffs/spark/1.4.1_vs_1.5.0/
// yarn/src/main/scala/org/apache/spark/deploy/yarn/
// ApplicationMasterArguments.scala-diff.html
// spark 1.5.x removed --num-executors
// amargs.append(" --num-executors ").append(numberOfExecutors);
>>>>>>>
// spark 1.5.x replaced --num-executors with --properties-file
// https://fossies.org/diffs/spark/1.4.1_vs_1.5.0/
// amargs.append(" --num-executors ").append(numberOfExecutors); |
<<<<<<<
private static final String VARIABLE_ANACONDA_DIR = "anaconda_dir";
=======
private static final String VARIABLE_INFLUXDB_ADDRESS = "influxdb_address";
private static final String VARIABLE_INFLUXDB_USER = "influxdb_user";
private static final String VARIABLE_INFLUXDB_PW = "influxdb_pw";
>>>>>>>
private static final String VARIABLE_ANACONDA_DIR = "anaconda_dir";
private static final String VARIABLE_INFLUXDB_ADDRESS = "influxdb_address";
private static final String VARIABLE_INFLUXDB_USER = "influxdb_user";
private static final String VARIABLE_INFLUXDB_PW = "influxdb_pw";
<<<<<<<
ANACONDA_DIR = setDirVar(VARIABLE_ANACONDA_DIR, ANACONDA_DIR);
=======
INFLUXDB_ADDRESS = setStrVar(VARIABLE_INFLUXDB_ADDRESS, INFLUXDB_ADDRESS);
INFLUXDB_USER = setStrVar(VARIABLE_INFLUXDB_USER, INFLUXDB_USER);
INFLUXDB_PW = setStrVar(VARIABLE_INFLUXDB_PW, INFLUXDB_PW);
>>>>>>>
ANACONDA_DIR = setDirVar(VARIABLE_ANACONDA_DIR, ANACONDA_DIR);
INFLUXDB_ADDRESS = setStrVar(VARIABLE_INFLUXDB_ADDRESS, INFLUXDB_ADDRESS);
INFLUXDB_USER = setStrVar(VARIABLE_INFLUXDB_USER, INFLUXDB_USER);
INFLUXDB_PW = setStrVar(VARIABLE_INFLUXDB_PW, INFLUXDB_PW); |
<<<<<<<
import javax.servlet.ServletContext;
=======
import javax.inject.Inject;
>>>>>>>
import javax.servlet.ServletContext;
import javax.inject.Inject;
<<<<<<<
import se.kth.meta.db.Dbao;
import se.kth.meta.entity.Templates;
import se.kth.meta.exception.DatabaseException;
=======
import se.kth.hopsworks.controller.ResponseMessages;
import se.kth.hopsworks.filters.AllowedRoles;
>>>>>>>
import se.kth.meta.db.Dbao;
import se.kth.meta.entity.Templates;
import se.kth.meta.exception.DatabaseException;
import se.kth.hopsworks.controller.ResponseMessages;
import se.kth.hopsworks.filters.AllowedRoles;
<<<<<<<
Inode parent = inodes.getProjectRoot(this.project.getName());
=======
Inode parent = inodes.getProjectRoot(this.project.getName());
>>>>>>>
Inode parent = inodes.getProjectRoot(this.project.getName());
<<<<<<<
Inode parent = inodes.getProjectRoot(this.project.getName());
Inode lastVisitedParent = new Inode(parent);
=======
Inode parent = inodes.getProjectRoot(this.project.getName());
>>>>>>>
Inode parent = inodes.getProjectRoot(this.project.getName());
Inode lastVisitedParent = new Inode(parent);
<<<<<<<
int resumableChunkNumber = getResumableChunkNumber(request);
ResumableInfo info = getResumableInfo(request, uploadPath);
String fileName = info.resumableFilename;
//check if all the non existing dir names in the path are valid.
Inode parent = inodes.getProjectRoot(this.project.getName());
String[] pathArray = path.split(File.separator);
for (String p : pathArray) {
if (parent != null) {
parent = inodes.findByParentAndName(parent, p);
} else {
datasetNameValidator.isValidName(p);
exist = false;
}
}
if (exist) { //if the path exists check if the file exists.
parent = inodes.findByParentAndName(parent, fileName);
if (parent != null) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.FILE_NAME_EXIST);
}
}
long content_length;
//Seek to position
try (RandomAccessFile raf
= new RandomAccessFile(info.resumableFilePath, "rw");
InputStream is = request.getInputStream()) {
//Seek to position
raf.seek((resumableChunkNumber - 1) * (long) info.resumableChunkSize);
//Save to file
long readed = 0;
content_length = HttpUtils.toLong(request.getParameter(
"flowCurrentChunkSize"), -1);
byte[] bytes = new byte[1024 * 100];
while (readed < content_length) {
int r = is.read(bytes);
if (r < 0) {
break;
}
raf.write(bytes, 0, r);
readed += r;
}
}
boolean finished = false;
//Mark as uploaded and check if finished
if (info.addChuckAndCheckIfFinished(
new ResumableInfo.ResumableChunkNumber(
resumableChunkNumber), content_length)) { //Check if all chunks uploaded, and change filename
ResumableInfoStorage.getInstance().remove(info);
logger.log(Level.SEVERE, "All finished.");
finished = true;
} else {
logger.log(Level.SEVERE, "Upload");
}
if (finished) {
try {
uploadPath = Utils.ensurePathEndsInSlash(uploadPath);
fileOps.copyAfterUploading(uploadPath + info.resumableFilename,
uploadPath
+ fileName);
logger.log(Level.SEVERE, "Copied to HDFS");
//might need try catch for security exception
Files.deleteIfExists(Paths.get(stagingManager.getStagingPath()
+ uploadPath + fileName));
} catch (IOException e) {
logger.log(Level.SEVERE, "Failed to write to HDSF", e);
}
}
json.setSuccessMessage("Successfuly uploaded file to " + uploadPath);
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
json).build();
}
private int getResumableChunkNumber(HttpServletRequest request) {
return HttpUtils.toInt(request.getParameter("flowChunkNumber"), -1);
=======
this.uploader.setPath(uploadPath);
return this.uploader;
>>>>>>>
this.uploader.setPath(uploadPath);
return this.uploader; |
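// Resolution note: the inline resumable-upload handling (RandomAccessFile
// seek/write, chunk bookkeeping, copy to HDFS) is replaced wholesale by
// delegating to the injected uploader: the method now just sets the target
// path and returns the uploader.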
<<<<<<<
builder.addLocalResource(dto, !appPath.startsWith("hdfs:"));
builder.addToAppMasterEnvironment(YarnRunner.KEY_CLASSPATH,
dto.getName());
extraClassPathFiles.append(dto.getName()).append(File.pathSeparator);
=======
if (dto.getName().equals(Settings.K_CERTIFICATE) || dto.getName().equals(Settings.T_CERTIFICATE)) {
//Set deletion to true so that certs are removed
builder.addLocalResource(dto, true);
} else {
if (jobType == JobType.PYSPARK) {
//For PySpark jobs prefix the resource name with __pyfiles__ as spark requires that.
//github.com/apache/spark/blob/v2.1.0/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala#L624
if (dto.getName().endsWith(".py")) {
dto.setName(Settings.SPARK_LOCALIZED_PYTHON_DIR + File.separator + dto.getName());
} else {
pythonPath.append(File.pathSeparator).append(dto.getName());
}
} else {
builder.addToAppMasterEnvironment(YarnRunner.KEY_CLASSPATH, dto.getName());
extraClassPathFiles.append(dto.getName()).append(File.pathSeparator);
}
builder.addLocalResource(dto, !appPath.startsWith("hdfs:"));
}
>>>>>>>
if (jobType == JobType.PYSPARK) {
//For PySpark jobs prefix the resource name with __pyfiles__ as spark requires that.
//github.com/apache/spark/blob/v2.1.0/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala#L624
if (dto.getName().endsWith(".py")) {
dto.setName(Settings.SPARK_LOCALIZED_PYTHON_DIR + File.separator + dto.getName());
} else {
pythonPath.append(File.pathSeparator).append(dto.getName());
}
} else {
builder.addToAppMasterEnvironment(YarnRunner.KEY_CLASSPATH, dto.getName());
extraClassPathFiles.append(dto.getName()).append(File.pathSeparator);
}
builder.addLocalResource(dto, !appPath.startsWith("hdfs:")); |
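// Resolution note: the PySpark __pyfiles__ handling is kept, but the right
// branch's special case that localized K_CERTIFICATE/T_CERTIFICATE resources
// with deletion enabled is dropped — if certificate cleanup is still required,
// it has to happen somewhere outside this hunk.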
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
=======
final ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
=======
final ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL,
JsonTypeInfo.As.PROPERTY)
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
<<<<<<<
public void testValueToTreeWithDefaultTyping() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
=======
public void testValueToTreeWithDefaultTyping() throws Exception {
final ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
>>>>>>>
public void testValueToTreeWithDefaultTyping() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY)
<<<<<<<
.enableDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, "@class")
.addModule(testModule)
=======
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL, "@class")
>>>>>>>
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, "@class")
.addModule(testModule) |
<<<<<<<
resources.add(se.kth.hopsworks.rest.DataSetService.class);
resources.add(se.kth.hopsworks.rest.ProjectMembers.class);
=======
resources.add(se.kth.hopsworks.rest.CuneiformService.class);
resources.add(se.kth.hopsworks.rest.DataSetService.class);
resources.add(se.kth.hopsworks.rest.JobService.class);
resources.add(se.kth.hopsworks.rest.ProjectMembers.class);
>>>>>>>
resources.add(se.kth.hopsworks.rest.CuneiformService.class);
resources.add(se.kth.hopsworks.rest.DataSetService.class); |
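// Resolution note: only CuneiformService and DataSetService are registered here;
// JobService and ProjectMembers, present on the incoming sides, do not appear in
// this hunk — they may be covered by unchanged context, but that is worth
// verifying.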
<<<<<<<
@NamedQuery(name = "Consent.findAll",
query = "SELECT c FROM Consent c"),
@NamedQuery(name = "Consent.findById",
query = "SELECT c FROM Consent c WHERE c.id = :id"),
@NamedQuery(name = "Consent.findByDate",
query = "SELECT c FROM Consent c WHERE c.date = :date"),
@NamedQuery(name = "Consent.findByStudyName",
query = "SELECT c FROM Consent c WHERE c.studyName = :studyName"),
@NamedQuery(name = "Consent.findByRetentionPeriod",
query
= "SELECT c FROM Consent c WHERE c.retentionPeriod = :retentionPeriod"),
@NamedQuery(name = "Consent.findByStatus",
query = "SELECT c FROM Consent c WHERE c.status = :status"),
@NamedQuery(name = "Consent.findByName",
query = "SELECT c FROM Consent c WHERE c.name = :name")})
=======
@NamedQuery(name = "Consent.findAll",
query = "SELECT c FROM Consent c"),
@NamedQuery(name = "Consent.findById",
query = "SELECT c FROM Consent c WHERE c.id = :id"),
@NamedQuery(name = "Consent.findByDate",
query = "SELECT c FROM Consent c WHERE c.date = :date"),
@NamedQuery(name = "Consent.findByStatus",
query = "SELECT c FROM Consent c WHERE c.status = :status"),
@NamedQuery(name = "Consent.findByName",
query = "SELECT c FROM Consent c WHERE c.name = :name"),
@NamedQuery(name = "Consent.findByStudyName",
query = "SELECT c FROM Consent c WHERE c.studyName = :studyName"),
@NamedQuery(name = "Consent.findByType",
query = "SELECT c FROM Consent c WHERE c.type = :type")})
>>>>>>>
@NamedQuery(name = "Consent.findAll",
query = "SELECT c FROM Consent c"),
@NamedQuery(name = "Consent.findById",
query = "SELECT c FROM Consent c WHERE c.id = :id"),
@NamedQuery(name = "Consent.findByDate",
query = "SELECT c FROM Consent c WHERE c.date = :date"),
@NamedQuery(name = "Consent.findByStatus",
query = "SELECT c FROM Consent c WHERE c.status = :status"),
@NamedQuery(name = "Consent.findByName",
query = "SELECT c FROM Consent c WHERE c.name = :name"),
@NamedQuery(name = "Consent.findByStudyName",
query = "SELECT c FROM Consent c WHERE c.studyName = :studyName"),
@NamedQuery(name = "Consent.findByType",
query = "SELECT c FROM Consent c WHERE c.type = :type")})
<<<<<<<
private static final long serialVersionUID = 1L;
@Id
@Basic(optional = false)
@NotNull
@Column(name = "id")
private Integer id;
@Column(name = "added")
@Temporal(TemporalType.DATE)
private Date date;
@Size(max = 128)
@Column(name = "study_name")
private String studyName;
@Column(name = "retention_period")
@Temporal(TemporalType.DATE)
private Date retentionPeriod;
@Lob
@Column(name = "consent_form")
private byte[] consentForm;
@Size(max = 30)
@Column(name = "status")
private String status;
@Size(max = 80)
@Column(name = "name")
private String name;
public Consent() {
}
public Consent(Integer id) {
this.id = id;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
public String getStudyName() {
return studyName;
}
public void setStudyName(String studyName) {
this.studyName = studyName;
}
public Date getRetentionPeriod() {
return retentionPeriod;
}
public void setRetentionPeriod(Date retentionPeriod) {
this.retentionPeriod = retentionPeriod;
}
public byte[] getConsentForm() {
return consentForm;
}
public void setConsentForm(byte[] consentForm) {
this.consentForm = consentForm;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public int hashCode() {
int hash = 0;
hash += (id != null ? id.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof Consent)) {
return false;
}
Consent other = (Consent) object;
if ((this.id == null && other.id != null) || (this.id != null && !this.id.
equals(other.id))) {
return false;
}
return true;
}
@Override
public String toString() {
return "se.kth.bbc.study.privacy.model.Consent[ id=" + id + " ]";
}
=======
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Basic(optional = false)
@Column(name = "id")
private Long id;
@Column(name = "date")
@Temporal(TemporalType.DATE)
private Date date;
@Size(max = 30)
@Column(name = "status")
private String status;
@Size(max = 80)
@Column(name = "name")
private String name;
@Size(max = 80)
@Column(name = "study_name")
private String studyName;
public String getStudyName() {
return studyName;
}
public void setStudyName(String studyName) {
this.studyName = studyName;
}
@Lob
@Column(name = "consent_form")
private byte[] consentForm;
@Size(max = 20)
@Column(name = "type")
private String type;
public Consent() {
}
public Consent(Long id) {
this.id = id;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public byte[] getConsentForm() {
return consentForm;
}
public void setConsentForm(byte[] consentForm) {
this.consentForm = consentForm;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
@Override
public int hashCode() {
int hash = 0;
hash += (id != null ? id.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof Consent)) {
return false;
}
Consent other = (Consent) object;
if ((this.id == null && other.id != null) || (this.id != null && !this.id.
equals(other.id))) {
return false;
}
return true;
}
@Override
public String toString() {
return "se.kth.bbc.study.privacy.model.Consent[ id=" + id + " ]";
}
>>>>>>>
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Basic(optional = false)
@Column(name = "id")
private Long id;
@Column(name = "date")
@Temporal(TemporalType.DATE)
private Date date;
@Size(max = 80)
@Column(name = "study_name")
private String studyName;
@Lob
@Column(name = "consent_form")
private byte[] consentForm;
@Size(max = 30)
@Column(name = "status")
private String status;
@Size(max = 80)
@Column(name = "name")
private String name;
@Size(max = 20)
@Column(name = "type")
private String type;
public String getStudyName() {
return studyName;
}
public void setStudyName(String studyName) {
this.studyName = studyName;
}
public Consent() {
}
public Consent(Long id) {
this.id = id;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public byte[] getConsentForm() {
return consentForm;
}
public void setConsentForm(byte[] consentForm) {
this.consentForm = consentForm;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
@Override
public int hashCode() {
int hash = 0;
hash += (id != null ? id.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof Consent)) {
return false;
}
Consent other = (Consent) object;
if ((this.id == null && other.id != null) || (this.id != null && !this.id.
equals(other.id))) {
return false;
}
return true;
}
@Override
public String toString() {
return "se.kth.bbc.study.privacy.model.Consent[ id=" + id + " ]";
} |
<<<<<<<
SPARK_HISTORY_SERVER_IP = setIpVar(VARIABLE_SPARK_HISTORY_SERVER_IP, SPARK_HISTORY_SERVER_IP);
ZK_IP = setIpVar(VARIABLE_ZK_IP, ZK_IP);
ZK_USER = setUserVar(VARIABLE_ZK_USER, ZK_USER);
ZK_DIR = setDirVar(VARIABLE_ZK_DIR, ZK_DIR);
KAFKA_USER = setUserVar(VARIABLE_KAFKA_USER, KAFKA_USER);
KAFKA_DIR = setDirVar(VARIABLE_KAFKA_DIR, KAFKA_DIR);
KAFKA_DEFAULT_NUM_PARTITIONS = setDirVar(VARIABLE_KAFKA_NUM_PARTITIONS, KAFKA_DEFAULT_NUM_PARTITIONS);
KAFKA_DEFAULT_NUM_REPLICAS = setDirVar(VARIABLE_KAFKA_NUM_REPLICAS, KAFKA_DEFAULT_NUM_REPLICAS);
=======
JHS_IP = setIpVar(VARIABLE_JHS_IP, JHS_IP);
OOZIE_IP = setIpVar(VARIABLE_OOZIE_IP, OOZIE_IP);
SPARK_HISTORY_SERVER_IP = setIpVar(VARIABLE_SPARK_HISTORY_SERVER_IP, SPARK_HISTORY_SERVER_IP);
>>>>>>>
JHS_IP = setIpVar(VARIABLE_JHS_IP, JHS_IP);
OOZIE_IP = setIpVar(VARIABLE_OOZIE_IP, OOZIE_IP);
SPARK_HISTORY_SERVER_IP = setIpVar(VARIABLE_SPARK_HISTORY_SERVER_IP, SPARK_HISTORY_SERVER_IP);
ZK_IP = setIpVar(VARIABLE_ZK_IP, ZK_IP);
ZK_USER = setUserVar(VARIABLE_ZK_USER, ZK_USER);
ZK_DIR = setDirVar(VARIABLE_ZK_DIR, ZK_DIR);
KAFKA_USER = setUserVar(VARIABLE_KAFKA_USER, KAFKA_USER);
KAFKA_DIR = setDirVar(VARIABLE_KAFKA_DIR, KAFKA_DIR);
KAFKA_DEFAULT_NUM_PARTITIONS = setDirVar(VARIABLE_KAFKA_NUM_PARTITIONS, KAFKA_DEFAULT_NUM_PARTITIONS);
KAFKA_DEFAULT_NUM_REPLICAS = setDirVar(VARIABLE_KAFKA_NUM_REPLICAS, KAFKA_DEFAULT_NUM_REPLICAS);
<<<<<<<
public static final int ZK_IP_PORT = 2181;
// Zookeeper
private String ZK_IP = "10.0.2.15";
public synchronized String getZkIp() {
checkCache();
return ZK_IP+":"+ZK_IP_PORT;
}
private String ZK_USER = "zk";
public synchronized String getZkUser() {
checkCache();
return ZK_USER;
}
private String KAFKA_USER = "kafka";
public synchronized String getKafkaUser() {
checkCache();
return KAFKA_USER;
}
private String KAFKA_DIR = "/srv/kafka";
public synchronized String getKafkaDir() {
checkCache();
return KAFKA_DIR;
}
private String KAFKA_DEFAULT_NUM_PARTITIONS = "2";
private String KAFKA_DEFAULT_NUM_REPLICAS = "1";
public synchronized String getKafkaDefaultNumPartitions() {
checkCache();
return KAFKA_DEFAULT_NUM_PARTITIONS;
}
public synchronized String getKafkaDefaultNumReplicas() {
checkCache();
return KAFKA_DEFAULT_NUM_REPLICAS;
}
private String ZK_DIR = "/srv/zookeeper";
public synchronized String getZkDir() {
checkCache();
return ZK_DIR;
}
=======
// Oozie
private String OOZIE_IP = "127.0.0.1";
public synchronized String getOozieIp() {
checkCache();
return OOZIE_IP;
}
// MapReduce Job History Server
private String JHS_IP = "127.0.0.1";
public synchronized String getJhsIp() {
checkCache();
return JHS_IP;
}
>>>>>>>
// Oozie
private String OOZIE_IP = "127.0.0.1";
public synchronized String getOozieIp() {
checkCache();
return OOZIE_IP;
}
// MapReduce Job History Server
private String JHS_IP = "127.0.0.1";
public synchronized String getJhsIp() {
checkCache();
return JHS_IP;
}
public static final int ZK_IP_PORT = 2181;
// Zookeeper
private String ZK_IP = "10.0.2.15";
public synchronized String getZkIp() {
checkCache();
return ZK_IP+":"+ZK_IP_PORT;
}
private String ZK_USER = "zk";
public synchronized String getZkUser() {
checkCache();
return ZK_USER;
}
private String KAFKA_USER = "kafka";
public synchronized String getKafkaUser() {
checkCache();
return KAFKA_USER;
}
private String KAFKA_DIR = "/srv/kafka";
public synchronized String getKafkaDir() {
checkCache();
return KAFKA_DIR;
}
private String KAFKA_DEFAULT_NUM_PARTITIONS = "2";
private String KAFKA_DEFAULT_NUM_REPLICAS = "1";
public synchronized String getKafkaDefaultNumPartitions() {
checkCache();
return KAFKA_DEFAULT_NUM_PARTITIONS;
}
public synchronized String getKafkaDefaultNumReplicas() {
checkCache();
return KAFKA_DEFAULT_NUM_REPLICAS;
}
private String ZK_DIR = "/srv/zookeeper";
public synchronized String getZkDir() {
checkCache();
return ZK_DIR;
} |
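// Resolution note: another union merge — the Oozie and Job History Server
// settings from one branch sit alongside the ZooKeeper/Kafka settings from the
// other. The numeric Kafka defaults are still read via setDirVar(), mirroring
// the incoming branch; a dedicated numeric setter might be cleaner, but the
// resolution preserves both sides as-is.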
<<<<<<<
fileOps.copyToHDFSFromLocal(true, new File(stagingManager.getStagingPath(), info.getResumableFilename()).
getAbsolutePath(), uploadPath
+ info.getResumableFilename());
=======
fileOps.copyToHDFSFromLocal(true, new File(stagingManager.
getStagingPath(), info.resumableFilename).
getAbsolutePath(), uploadPath
+ info.resumableFilename);
>>>>>>>
fileOps.copyToHDFSFromLocal(true, new File(stagingManager.
getStagingPath(), info.getResumableFilename()).
getAbsolutePath(), uploadPath
+ info.getResumableFilename()); |
<<<<<<<
=======
} catch (IOException e) {
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(), e.getMessage());
>>>>>>> |
<<<<<<<
private String name;
private boolean dir;
private boolean parent;
private String path;
private Date modification;
private int id;
private int template;
public InodeView() {
}
=======
private String name;
private boolean dir;
private boolean parent;
private String path;
private Date modification;
private Date accessTime;
public InodeView() {
}
>>>>>>>
private String name;
private boolean dir;
private boolean parent;
private String path;
private Date modification;
private Date accessTime;
private int id;
private int template;
public InodeView() {
}
<<<<<<<
public int getId() {
return this.id;
}
public void setId(int id) {
this.id = id;
}
public int getTemplate() {
return this.template;
}
public void setTemplate(int template) {
this.template = template;
}
=======
public Date getAccessTime() {
return accessTime;
}
>>>>>>>
public Date getAccessTime() {
return accessTime;
} |
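// Resolution note: the id/template fields are kept from the left branch and the
// accessTime getter from the right, but the left branch's id/template accessors
// are not carried over — either they exist in unchanged context or these fields
// are left without getters/setters.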
<<<<<<<
=======
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import se.kth.bbc.jobs.jobhistory.ExecutionInputfilesFacade;
>>>>>>>
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType; |
<<<<<<<
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
=======
import java.util.Collections;
import java.util.Comparator;
>>>>>>>
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.Comparator;
<<<<<<<
logger.log(Level.SEVERE,
ResponseMessages.PROJECT_SERVICE_NOT_FOUND, iex);
=======
LOGGER.log(Level.SEVERE,
ResponseMessages.PROJECT_SERVICE_NOT_FOUND, iex);
>>>>>>>
LOGGER.log(Level.SEVERE,
ResponseMessages.PROJECT_SERVICE_NOT_FOUND, iex);
<<<<<<<
} catch (Exception ex) {
logger.log(Level.SEVERE, null, ex);
=======
}catch (Exception ex) {
LOGGER.log(Level.SEVERE, null, ex);
>>>>>>>
} catch (Exception ex) {
LOGGER.log(Level.SEVERE, null, ex);
<<<<<<<
|| !noExistingGroup(project.getName())
|| !verifyQuota(project.getName()) || !verifyLogs(dfso, project.
getName())
|| !noExistingCertificates(project.getName())) {
logger.log(Level.WARNING,
"some elements of project {0} already exist in the system "
+ "Possible inconsistency!",
project.getName());
=======
|| !noExistingGroup(project.getName())
|| !verifyQuota(project.getName()) || !verifyLogs(dfso, project.getName())
|| !noExistingCertificates(project.getName())) {
LOGGER.log(Level.WARNING,
"some elements of project {0} already exist in the system "
+ "Possible inconsistency!",
project.getName());
>>>>>>>
|| !noExistingGroup(project.getName())
|| !verifyQuota(project.getName()) || !verifyLogs(dfso, project.
getName())
|| !noExistingCertificates(project.getName())) {
LOGGER.log(Level.WARNING,
"some elements of project {0} already exist in the system "
+ "Possible inconsistency!",
project.getName());
<<<<<<<
logger.log(Level.INFO, "Project with name {0} already exists!",
projectName);
=======
LOGGER.log(Level.INFO, "Project with name {0} already exists!",
projectName);
>>>>>>>
LOGGER.log(Level.INFO, "Project with name {0} already exists!",
projectName);
<<<<<<<
if (projectInode != null) {
=======
if(projectInode!=null){
LOGGER.log(Level.WARNING, "project folder existing for project {0}", project.getName());
>>>>>>>
if (projectInode != null) {
LOGGER.log(Level.WARNING, "project folder existing for project {0}",
project.getName());
<<<<<<<
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
=======
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"something went wrong when adding the example jar to the project");
>>>>>>>
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
<<<<<<<
logger.log(Level.WARNING,
"More than one spark-examples*.jar found in {0}.", dir.
=======
LOGGER.log(Level.WARNING,
"More than one spark-examples*.jar found in {0}.", dir.
>>>>>>>
LOGGER.log(Level.WARNING,
"More than one spark-examples*.jar found in {0}.", dir.
<<<<<<<
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
=======
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"something went wrong when adding the example jar to the project");
>>>>>>>
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
<<<<<<<
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
=======
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"something went wrong when adding the example jar to the project");
>>>>>>>
      LOGGER.log(Level.SEVERE, null, ex);
      throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project"); |
<<<<<<<
import se.kth.hopsworks.hdfs.fileoperations.DistributedFsService;
=======
import se.kth.hopsworks.hdfsUsers.controller.HdfsUsersController;
>>>>>>>
import se.kth.hopsworks.hdfs.fileoperations.DistributedFsService;
import se.kth.hopsworks.hdfsUsers.controller.HdfsUsersController;
<<<<<<<
@EJB
private DistributedFsService dfs;
=======
@EJB
private Settings settings;
@EJB
private YarnApplicationstateFacade yarnApplicationstateFacade;
@EJB
private HdfsUsersController hdfsUsersBean;
>>>>>>>
@EJB
private DistributedFsService dfs;
@EJB
private Settings settings;
@EJB
private YarnApplicationstateFacade yarnApplicationstateFacade;
@EJB
private HdfsUsersController hdfsUsersBean;
<<<<<<<
=======
private boolean hasAppAccessRight(String trackingUrl, JobDescription job){
String appId ="";
if(trackingUrl.contains("application_")){
for(String elem: trackingUrl.split("/")){
if(elem.contains("application_")){
appId = elem;
break;
}
}
}else if (trackingUrl.contains("container_")){
appId ="application_";
for(String elem: trackingUrl.split("/")){
if(elem.contains("container_")){
String[] containerIdElem = elem.split("_");
appId = appId + containerIdElem[1] + "_" + containerIdElem[2];
break;
}
}
}
if (appId != "") {
String appUser = yarnApplicationstateFacade.findByAppId(appId).
getAppuser();
if (!job.getProject().getName().equals(hdfsUsersBean.getProjectName(
appUser))) {
return false;
}
}
return true;
}
>>>>>>>
  private boolean hasAppAccessRight(String trackingUrl, JobDescription job) {
    String appId = "";
    if (trackingUrl.contains("application_")) {
      for (String elem : trackingUrl.split("/")) {
        if (elem.contains("application_")) {
          appId = elem;
          break;
        }
      }
    } else if (trackingUrl.contains("container_")) {
      appId = "application_";
      for (String elem : trackingUrl.split("/")) {
        if (elem.contains("container_")) {
          String[] containerIdElem = elem.split("_");
          appId = appId + containerIdElem[1] + "_" + containerIdElem[2];
          break;
        }
      }
    }
    if (!appId.isEmpty()) {
      String appUser = yarnApplicationstateFacade.findByAppId(appId).
          getAppuser();
      if (!job.getProject().getName().equals(hdfsUsersBean.getProjectName(
          appUser))) {
        return false;
      }
    }
    return true;
} |
<<<<<<<
=======
public String getOwnerRole() {
return owner;
}
public void setOwnerRole(String owner) {
this.owner = owner;
}
public String getNewTeamRole() {
return newTeamRole;
}
public void setNewTeamRole(String newTeamRole) {
this.newTeamRole = newTeamRole;
}
public String getChangedRole() {
return newChangedRole;
}
public void setChangedRole(String newChangedRole) {
this.newChangedRole = newChangedRole;
}
public String getNewRole() {
return newRole;
}
public void setNewRole(String newRole) {
this.newRole = newRole;
}
>>>>>>>
<<<<<<<
=======
>>>>>>>
<<<<<<<
=======
public DatasetStudy getDatasetStudy() {
if (dsStudy == null) {
dsStudy = new DatasetStudy();
}
return dsStudy;
}
public void setDatasetStudy(DatasetStudy dsStudy) {
this.dsStudy = dsStudy;
}
public Dataset getDataset() {
if (dataset == null) {
dataset = new Dataset();
}
return dataset;
}
public void setDataset(Dataset dataset) {
this.dataset = dataset;
}
>>>>>>>
<<<<<<<
=======
public String getStudyTeamRole(String email) {
this.setTeamRole = studyTeamController.findByPrimaryKey(studyName, email).getTeamRole();
return setTeamRole;
}
public void setStudyTeamRole(String email, String role) {
this.setTeamRole = role;
}
>>>>>>>
<<<<<<<
=======
public List<StudyRoleTypes> getTeamForResearchList() {
List<StudyRoleTypes> reOrder = new ArrayList<>();
reOrder.add(StudyRoleTypes.RESEARCHER);
reOrder.add(StudyRoleTypes.MASTER);
reOrder.add(StudyRoleTypes.AUDITOR);
return reOrder;
}
public List<StudyRoleTypes> getTeamForGuestList() {
List<StudyRoleTypes> reOrder = new ArrayList<>();
reOrder.add(StudyRoleTypes.AUDITOR);
reOrder.add(StudyRoleTypes.MASTER);
reOrder.add(StudyRoleTypes.RESEARCHER);
return reOrder;
}
>>>>>>>
<<<<<<<
=======
>>>>>>>
<<<<<<<
//Set the study owner as study master in StudyTeam table
=======
>>>>>>>
//Set the study owner as study master in StudyTeam table
<<<<<<<
boolean rec = sampleFilesController.checkForExistingSampleFiles(getSampleID(), fileName);
=======
boolean rec = sampleFilesController.checkForExistingSampleFiles(getSampleID(), fileName);
>>>>>>>
boolean rec = sampleFilesController.checkForExistingSampleFiles(getSampleID(), fileName);
<<<<<<<
public void mkDIRS(String sampleID, String fileType, String fileName) throws IOException {
=======
public void mkDIRS(String fileType, String fileName, String sampleID) throws IOException, URISyntaxException {
>>>>>>>
public void mkDIRS(String sampleID, String fileType, String fileName) throws IOException {
<<<<<<<
fs.mkdirs(path.suffix(File.separator + sampleID + File.separator + fileType.toUpperCase().trim()), null);
copyFromLocal(fileType, fileName, sampleID);
} catch (URISyntaxException uri) {
logger.log(Level.SEVERE, "Directories were not created in HDFS...{0}", uri.getMessage());
=======
fs.mkdirs(path.suffix(File.separator + sampleID + File.separator + fileType.toUpperCase().trim()), null);
copyFromLocal(fileType, fileName, sampleID);
} catch (IOException ioe) {
System.err.println("IOException during operation" + ioe.getMessage());
>>>>>>>
fs.mkdirs(path.suffix(File.separator + sampleID + File.separator + fileType.toUpperCase().trim()), null);
copyFromLocal(fileType, fileName, sampleID);
} catch (URISyntaxException uri) {
logger.log(Level.SEVERE, "Directories were not created in HDFS...{0}", uri.getMessage());
<<<<<<<
logger.log(Level.INFO, "Sample file status updated in TreeTable, ID: {0}, Name: {1}", new Object[]{id,filename});
=======
for (FileStructureListener l : fileListeners) {
l.updateStatus(id, filename.substring(filename.lastIndexOf('.') + 1), filename, "available");
}
>>>>>>>
for (FileStructureListener l : fileListeners) {
l.updateStatus(id, filename.substring(filename.lastIndexOf('.') + 1), filename, "available");
}
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId);
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - Sample was deleted from {1} in HDFS", new Object[]{sampleId, studyName});
} else {
logger.log(Level.SEVERE, "Sample id {0} does not exist", sampleId);
}
//remove the sample from SampleIds
deleteSamples(sampleId);
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId);
if (fs.exists(build)) {
fs.delete(build, true);
} else {
System.out.println("Sample ID does not exist");
}
*/
//remove the sample from SampleIds
deleteSamples(sampleId);
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId);
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - Sample was deleted from {1} in HDFS", new Object[]{sampleId, studyName});
} else {
logger.log(Level.SEVERE, "Sample id {0} does not exist", sampleId);
}
//remove the sample from SampleIds
deleteSamples(sampleId);
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim());
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - File type folder was deleted from {1} in HDFS", new Object[]{fileType.toUpperCase(), studyName});
} else {
logger.log(Level.SEVERE, "{0} - File type folder does not exist", fileType.toUpperCase());
}
//remove file type records
deleteFileTypes(sampleId, fileType);
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim());
if (fs.exists(build)) {
fs.delete(build, true);
} else {
System.out.println("File Type folder does not exist");
}
*/
//remove file type records
deleteFileTypes(sampleId, fileType);
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim());
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - File type folder was deleted from {1} in HDFS", new Object[]{fileType.toUpperCase(), studyName});
} else {
logger.log(Level.SEVERE, "{0} - File type folder does not exist", fileType.toUpperCase());
}
//remove file type records
deleteFileTypes(sampleId, fileType);
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + filename);
//check the file count inside a directory, if it is only one then recursive delete
if (fs.exists(build)) {
//if(fs.getFileStatus(build))
fs.delete(build, false);
} else {
System.out.println("File does not exist");
}
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + filename);
if (fs.exists(build)) {
fs.delete(build, false);
} else {
System.out.println("File does not exist");
}
*/
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + filename);
//check the file count inside a directory, if it is only one then recursive delete
if (fs.exists(build)) {
//if(fs.getFileStatus(build))
fs.delete(build, false);
} else {
System.out.println("File does not exist");
}
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path outputPath = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + fileName.trim());
try {
if (!fs.exists(outputPath)) {
System.out.println("Error: File does not exist for downloading" + fileName);
return;
}
InputStream inStream = fs.open(outputPath, 1048576);
file = new DefaultStreamedContent(inStream, "fastq/fasta/bam/sam/vcf", fileName);
} finally {
//inStream.close();
}
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path outputPath = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + fileName.trim());
//System.out.println("download path "+outputPath.toString());
try {
if (!fs.exists(outputPath)) {
System.out.println("Error: File does not exist for downloading" + fileName);
return;
}
InputStream inStream = fs.open(outputPath, 1048576);
file = new DefaultStreamedContent(inStream, "fastq/fasta/bam/sam/vcf", fileName);
} finally {
//inStream.close();
}*/
System.out.println("Called download");
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path outputPath = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + fileName.trim());
try {
if (!fs.exists(outputPath)) {
System.out.println("Error: File does not exist for downloading" + fileName);
return;
}
InputStream inStream = fs.open(outputPath, 1048576);
file = new DefaultStreamedContent(inStream, "fastq/fasta/bam/sam/vcf", fileName);
} finally {
//inStream.close();
}
<<<<<<<
=======
public String onComplete() {
return "indexPage";
}
public void save(ActionEvent actionEvent) {
createStudy();
}
public String onFlowProcess(FlowEvent event) {
logger.info(event.getOldStep());
logger.info(event.getNewStep());
return event.getNewStep();
}
public void showNewStudyDialog() {
RequestContext.getCurrentInstance().update("formNewStudy");
RequestContext.getCurrentInstance().reset("formNewStudy");
RequestContext.getCurrentInstance().execute("dlgNewStudy.show()");
}
public void showNewStudyMemberDialog() {
RequestContext.getCurrentInstance().update("formNewStudyMember");
RequestContext.getCurrentInstance().reset("formNewStudyMember");
RequestContext.getCurrentInstance().execute("dlgNewStudyMember.show()");
}
>>>>>>>
<<<<<<<
/*public void deleteDataDlg() {
Map<String, Object> options = new HashMap<>();
options.put("modal", true);
options.put("draggable", false);
options.put("resizable", false);
options.put("width", 320);
options.put("contentWidth", 300);
options.put("height", 100);
RequestContext.getCurrentInstance().openDialog("confirmDelete", options, null);
}
public void closeConfirmDelete() {
RequestContext.getCurrentInstance().closeDialog(null);
}
public void onDeleteDlgDone(SelectEvent event) {
FacesMessage mess = (FacesMessage) (event.getObject());
FacesContext.getCurrentInstance().addMessage("remove", mess);
}*/
=======
public void registerFileListener(FileStructureListener listener) {
if (!fileListeners.contains(listener)) {
fileListeners.add(listener);
}
}
>>>>>>>
public void registerFileListener(FileStructureListener listener) {
if (!fileListeners.contains(listener)) {
fileListeners.add(listener);
}
} |
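Several conflicts in the row above build HDFS paths under /Projects/<study>/<sampleId> and delete them recursively through the Hadoop FileSystem API. A minimal sketch of that pattern, assuming an illustrative namenode URI and path values:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsDeleteExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:8020"); // assumption: local namenode
        try (FileSystem fs = FileSystem.get(conf)) {
            Path sampleDir = new Path("/Projects/myStudy/sample_001"); // assumed layout
            if (fs.exists(sampleDir)) {
                fs.delete(sampleDir, true); // recursive, as in the sample-deletion branch
            }
        }
    }
}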
<<<<<<<
private boolean enableLogDir = true;
private String eventLogDir;
=======
private String sessionId;//used by Kafka
>>>>>>>
private boolean enableLogDir = true;
private String eventLogDir;
private String sessionId;//used by Kafka
<<<<<<<
// if (classPath == null || classPath.isEmpty()) {
// builder.addToAppMasterEnvironment("CLASSPATH", sparkClasspath);
// } else {
// builder.addToAppMasterEnvironment("CLASSPATH", classPath + ":"
// + sparkClasspath);
// }
=======
//Removed local Spark classpath
// if (classPath == null || classPath.isEmpty()) {
// builder.addToAppMasterEnvironment("CLASSPATH", sparkClasspath);
// } else {
// builder.addToAppMasterEnvironment("CLASSPATH", classPath + ":"
// + sparkClasspath);
// }
>>>>>>>
//Removed local Spark classpath
// if (classPath == null || classPath.isEmpty()) {
// builder.addToAppMasterEnvironment("CLASSPATH", sparkClasspath);
// } else {
// builder.addToAppMasterEnvironment("CLASSPATH", classPath + ":"
// + sparkClasspath);
// }
<<<<<<<
=======
addSystemProperty(Settings.KAFKA_SESSIONID_ENV_VAR, sessionId);
addSystemProperty(Settings.SPARK_HISTORY_SERVER_ENV, sparkHistoryServerIp);
addSystemProperty(Settings.SPARK_NUMBER_EXECUTORS, Integer.toString(
numberOfExecutors));
>>>>>>>
addSystemProperty(Settings.KAFKA_SESSIONID_ENV_VAR, sessionId);
addSystemProperty(Settings.SPARK_HISTORY_SERVER_ENV, sparkHistoryServerIp);
addSystemProperty(Settings.SPARK_NUMBER_EXECUTORS, Integer.toString(
numberOfExecutors));
<<<<<<<
// Spark Configuration File. Needed for the Spark History Server
amargs.append(" --properties-file");
amargs.append(" /srv/spark/conf/spark-defaults.conf");
=======
// amargs.append(" --properties-file");
// amargs.append(" /srv/spark/conf/spark-defaults.conf");
>>>>>>>
// Spark Configuration File. Needed for the Spark History Server
amargs.append(" --properties-file");
amargs.append(" /srv/spark/conf/spark-defaults.conf");
// amargs.append(" --properties-file");
// amargs.append(" /srv/spark/conf/spark-defaults.conf"); |
<<<<<<<
PaasCredentials credentials = credentialsEJB.find();
Provider check = Provider.fromString(credentials.getProvider());
if (Provider.AWS_EC2.equals(check)) {
provider = Provider.AWS_EC2.toString();
=======
PaaSCredentials credentials = credentialsEJB.find();
ProviderType check = ProviderType.fromString(credentials.getProvider());
if (ProviderType.AWS_EC2.equals(check)) {
provider = ProviderType.AWS_EC2.toString();
>>>>>>>
PaasCredentials credentials = credentialsEJB.find();
ProviderType check = ProviderType.fromString(credentials.getProvider());
if (ProviderType.AWS_EC2.equals(check)) {
provider = ProviderType.AWS_EC2.toString(); |
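The resolution above keeps the renamed ProviderType.fromString(...) lookup against the stored provider string. A sketch of an enum carrying such a factory; only AWS_EC2 comes from the row, the second constant and the string labels are assumptions:

public enum ProviderType {
    AWS_EC2("aws-ec2"),
    OPENSTACK("openstack"); // assumption: illustrative second provider

    private final String label;

    ProviderType(String label) { this.label = label; }

    @Override
    public String toString() { return label; }

    // Case-insensitive reverse lookup, matching the fromString usage above.
    public static ProviderType fromString(String label) {
        for (ProviderType p : values()) {
            if (p.label.equalsIgnoreCase(label)) {
                return p;
            }
        }
        throw new IllegalArgumentException("Unknown provider: " + label);
    }
}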
<<<<<<<
=======
private String kafkaAddress;
>>>>>>>
private String kafkaAddress; |
<<<<<<<
import se.kth.bbc.fileoperations.FileSystemOperations;
import se.kth.bbc.lims.Constants;
=======
>>>>>>>
import se.kth.bbc.fileoperations.FileSystemOperations; |
<<<<<<<
private boolean createDataset(String dsPath, Inode parent, String dsName,
int template) throws AppException {
boolean success = false;
try {
success = fileOps.mkDir(dsPath);
//the inode has been created in the file system
if (success && template != 0) {
//get the newly created inode and the template it comes with
Inode neww = inodes.findByParentAndName(parent, dsName);
Template templ = this.template.findByTemplateId(template);
if (templ != null) {
templ.getInodes().add(neww);
//persist the relationship table
this.template.updateTemplatesInodesMxN(templ);
}
}
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
"Could not create the directory at " + dsPath);
} catch (DatabaseException e) {
logger.log(Level.SEVERE, null, e);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"Could not attach template to inode " + e.getMessage());
}
return success;
}
//this should be in its own class
private void logActivity(String activityPerformed, String flag,
User performedBy, Project performedOn) {
Date now = new Date();
Activity activity = new Activity();
activity.setActivity(activityPerformed);
activity.setFlag(flag);
activity.setProject(performedOn);
activity.setTimestamp(now);
activity.setUser(performedBy);
activityFacade.persistActivity(activity);
}
private String getFullPath(String path) throws AppException {
//Strip leading slashes.
while (path.startsWith("/")) {
path = path.substring(1);
}
String dsName;
String projectName;
String[] parts = path.split(File.separator);
if (parts != null && parts[0].contains(Constants.SHARED_FILE_SEPARATOR)) {
//we can split the string and get the project name, but we have to
//make sure that the user have access to the dataset.
String[] shardDS = parts[0].split(Constants.SHARED_FILE_SEPARATOR);
if (shardDS.length < 2) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.DATASET_NOT_FOUND);
}
projectName = shardDS[0];
dsName = shardDS[1];
Inode parent = inodes.getProjectRoot(projectName);
Inode dsInode = inodes.findByParentAndName(parent, dsName);
this.dataset = datasetFacade.findByProjectAndInode(this.project, dsInode);
if (this.dataset == null) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.DATASET_NOT_FOUND);
}
path = path.replaceFirst(projectName + Constants.SHARED_FILE_SEPARATOR
+ dsName, projectName
+ File.separator + dsName);
} else {
return this.path + path;
}
return File.separator + Constants.DIR_ROOT + File.separator
+ path;
}
=======
@POST
@Path("/attachTemplate")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.DATA_OWNER})
public Response attachTemplate(FileTemplateDTO filetemplateData) throws
AppException {
if (filetemplateData == null || filetemplateData.getInodePath() == null
|| filetemplateData.getInodePath().equals("")) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.TEMPLATE_INODEID_EMPTY);
}
String inodePath = filetemplateData.getInodePath();
int templateid = filetemplateData.getTemplateId();
Inode inode = inodes.getInodeAtPath(inodePath);
Template temp = template.findByTemplateId(templateid);
temp.getInodes().add(inode);
logger.log(Level.INFO, "ATTACHING TEMPLATE {0} TO INODE {0}",
new Object[]{templateid, inode.getId()});
try {
//persist the relationship
this.template.updateTemplatesInodesMxN(temp);
} catch (DatabaseException e) {
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
ResponseMessages.TEMPLATE_NOT_ATTACHED);
}
JsonResponse json = new JsonResponse();
json.setSuccessMessage("The template was attached to file "
+ inode.getId());
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
json).build();
}
>>>>>>>
private boolean createDataset(String dsPath, Inode parent, String dsName,
int template) throws AppException {
boolean success = false;
try {
success = fileOps.mkDir(dsPath);
//the inode has been created in the file system
if (success && template != 0) {
//get the newly created inode and the template it comes with
Inode neww = inodes.findByParentAndName(parent, dsName);
Template templ = this.template.findByTemplateId(template);
if (templ != null) {
templ.getInodes().add(neww);
//persist the relationship table
this.template.updateTemplatesInodesMxN(templ);
}
}
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
"Could not create the directory at " + dsPath);
} catch (DatabaseException e) {
logger.log(Level.SEVERE, null, e);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"Could not attach template to inode " + e.getMessage());
}
return success;
}
//this should be in its own class
private void logActivity(String activityPerformed, String flag,
User performedBy, Project performedOn) {
Date now = new Date();
Activity activity = new Activity();
activity.setActivity(activityPerformed);
activity.setFlag(flag);
activity.setProject(performedOn);
activity.setTimestamp(now);
activity.setUser(performedBy);
activityFacade.persistActivity(activity);
}
private String getFullPath(String path) throws AppException {
//Strip leading slashes.
while (path.startsWith("/")) {
path = path.substring(1);
}
String dsName;
String projectName;
String[] parts = path.split(File.separator);
if (parts != null && parts[0].contains(Constants.SHARED_FILE_SEPARATOR)) {
//we can split the string and get the project name, but we have to
//make sure that the user have access to the dataset.
String[] shardDS = parts[0].split(Constants.SHARED_FILE_SEPARATOR);
if (shardDS.length < 2) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.DATASET_NOT_FOUND);
}
projectName = shardDS[0];
dsName = shardDS[1];
Inode parent = inodes.getProjectRoot(projectName);
Inode dsInode = inodes.findByParentAndName(parent, dsName);
this.dataset = datasetFacade.findByProjectAndInode(this.project, dsInode);
if (this.dataset == null) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.DATASET_NOT_FOUND);
}
path = path.replaceFirst(projectName + Constants.SHARED_FILE_SEPARATOR
+ dsName, projectName
+ File.separator + dsName);
} else {
return this.path + path;
}
return File.separator + Constants.DIR_ROOT + File.separator
+ path;
}
@POST
@Path("/attachTemplate")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.DATA_OWNER})
public Response attachTemplate(FileTemplateDTO filetemplateData) throws
AppException {
if (filetemplateData == null || filetemplateData.getInodePath() == null
|| filetemplateData.getInodePath().equals("")) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.TEMPLATE_INODEID_EMPTY);
}
String inodePath = filetemplateData.getInodePath();
int templateid = filetemplateData.getTemplateId();
Inode inode = inodes.getInodeAtPath(inodePath);
Template temp = template.findByTemplateId(templateid);
temp.getInodes().add(inode);
logger.log(Level.INFO, "ATTACHING TEMPLATE {0} TO INODE {0}",
new Object[]{templateid, inode.getId()});
try {
//persist the relationship
this.template.updateTemplatesInodesMxN(temp);
} catch (DatabaseException e) {
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
ResponseMessages.TEMPLATE_NOT_ATTACHED);
}
JsonResponse json = new JsonResponse();
json.setSuccessMessage("The template was attached to file "
+ inode.getId());
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
json).build();
} |
<<<<<<<
= "SELECT d FROM Dataset d WHERE d.name = :name AND d.project = :projectId")})
=======
= "SELECT d FROM Dataset d WHERE d.name = :name AND d.projectId = :projectId"),
@NamedQuery(name = "Dataset.findSharedWithProject",
query
= "SELECT d FROM Dataset d WHERE d.projectId = :projectId AND "
+ "d.shared = true")})
>>>>>>>
= "SELECT d FROM Dataset d WHERE d.name = :name AND d.project = :projectId"),
@NamedQuery(name = "Dataset.findSharedWithProject",
query
= "SELECT d FROM Dataset d WHERE d.project = :projectId AND "
+ "d.shared = true")}) |
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance)
=======
ObjectMapper m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, ".hype")
=======
ObjectMapper m = JsonMapper.builder()
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL,
".hype")
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL, ".hype")
<<<<<<<
=======
m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
.build();
>>>>>>>
<<<<<<<
m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE)
=======
m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.OBJECT_AND_NON_CONCRETE)
>>>>>>>
m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE)
<<<<<<<
// first: use "object or abstract" typing: should produce no type info:
ObjectMapper m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE)
=======
ObjectMapper m = JsonMapper.builder()
// first: use "object or abstract" typing: should produce no type info:
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.OBJECT_AND_NON_CONCRETE)
>>>>>>>
// first: use "object or abstract" typing: should produce no type info:
ObjectMapper m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE)
<<<<<<<
m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
=======
m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL)
>>>>>>>
m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
<<<<<<<
ObjectMapper m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
=======
ObjectMapper m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL)
>>>>>>>
ObjectMapper m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
<<<<<<<
ObjectMapper m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance)
=======
ObjectMapper m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
>>>>>>>
ObjectMapper m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
<<<<<<<
ObjectMapper m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance)
=======
ObjectMapper m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
>>>>>>>
ObjectMapper m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
<<<<<<<
ObjectMapper m = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance)
=======
ObjectMapper m = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
>>>>>>>
ObjectMapper m = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance)
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
=======
ObjectMapper mapper = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL)
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
=======
ObjectMapper mapper = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.NON_FINAL)
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.NON_FINAL)
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE, JsonTypeInfo.As.PROPERTY)
=======
ObjectMapper mapper = JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.OBJECT_AND_NON_CONCRETE, JsonTypeInfo.As.PROPERTY)
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE, JsonTypeInfo.As.PROPERTY)
<<<<<<<
ObjectMapper mapper = jsonMapperBuilder()
.enableDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE, "*CLASS*")
=======
ObjectMapper mapper = JsonMapper.builder()
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.OBJECT_AND_NON_CONCRETE, "*CLASS*")
>>>>>>>
ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTypingAsProperty(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE, "*CLASS*")
<<<<<<<
/*ObjectMapper mapper =*/ jsonMapperBuilder()
.enableDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.JAVA_LANG_OBJECT, JsonTypeInfo.As.EXTERNAL_PROPERTY)
=======
/*ObjectMapper mapper =*/ JsonMapper.builder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT,
JsonTypeInfo.As.EXTERNAL_PROPERTY)
>>>>>>>
/*ObjectMapper mapper =*/ jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.JAVA_LANG_OBJECT, JsonTypeInfo.As.EXTERNAL_PROPERTY) |
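The rows above migrate Jackson's deprecated enableDefaultTyping calls to the activateDefaultTyping builder API, using NoCheckSubTypeValidator, which is a permissive test helper. A sketch of the equivalent production setup with a restrictive validator; the package prefix is an assumption:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.databind.jsontype.BasicPolymorphicTypeValidator;

public class DefaultTypingSetup {
    public static ObjectMapper buildMapper() {
        // activateDefaultTyping is the 2.10+ replacement for enableDefaultTyping;
        // the validator restricts which subtypes may be deserialized.
        return JsonMapper.builder()
                .activateDefaultTyping(
                        BasicPolymorphicTypeValidator.builder()
                                .allowIfSubType("com.example.") // assumption: app package
                                .build(),
                        ObjectMapper.DefaultTyping.NON_FINAL)
                .build();
    }
}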
<<<<<<<
_mixIns, rootNames, _configOverrides,
_coercionConfigs);
=======
_subtypeResolver, _mixIns, rootNames, _configOverrides);
>>>>>>>
_subtypeResolver, _mixIns, rootNames, _configOverrides,
_coercionConfigs); |
<<<<<<<
private TornadoCoreRuntime() {
=======
private final OptionValues options;
// @formatter:off
public enum TORNADO_DRIVERS_DESCRIPTION {
OPENCL("implemented"),
PTX("unsupported");
String status;
TORNADO_DRIVERS_DESCRIPTION(String status) {
this.status = status;
}
String getStatus() {
return status;
}
}
// @formatter:on
public TornadoCoreRuntime() {
>>>>>>>
// @formatter:off
public enum TORNADO_DRIVERS_DESCRIPTION {
OPENCL("implemented"),
PTX("unsupported");
String status;
TORNADO_DRIVERS_DESCRIPTION(String status) {
this.status = status;
}
String getStatus() {
return status;
}
}
// @formatter:on
private TornadoCoreRuntime() {
<<<<<<<
=======
EconomicMap<OptionKey<?>, Object> opts = OptionValues.newOptionMap();
opts.putAll(HotSpotGraalOptionValues.HOTSPOT_OPTIONS.getMap());
opts.put(GraalOptions.OmitHotExceptionStacktrace, false);
opts.put(GraalOptions.MatchExpressions, true);
opts.put(GraalOptions.RemoveNeverExecutedCode, false);
opts.put(ConstantLoadOptimization.Options.LIROptConstantLoadOptimization, false);
opts.put(PostAllocationOptimizationStage.Options.LIROptRedundantMoveElimination, false);
options = new OptionValues(opts);
>>>>>>> |
<<<<<<<
return ctxt.handleUnexpectedToken(getValueType(ctxt), p.currentToken(), p,
"Cannot deserialize a POJO (of type %s) from non-Array representation (token: %s): "
+"type/property designed to be serialized as JSON Array",
_beanType.getRawClass().getName(),
p.currentToken());
=======
String message = "Cannot deserialize a POJO (of type %s) from non-Array representation (token: %s): "
+"type/property designed to be serialized as JSON Array";
return ctxt.handleUnexpectedToken(getValueType(ctxt), p.currentToken(), p,
message, ClassUtil.getTypeDescription(_beanType), p.currentToken());
>>>>>>>
return ctxt.handleUnexpectedToken(getValueType(ctxt), p.currentToken(), p,
"Cannot deserialize a POJO (of type %s) from non-Array representation (token: %s): "
+"type/property designed to be serialized as JSON Array",
ClassUtil.getTypeDescription(_beanType),
p.currentToken()); |
<<<<<<<
private boolean isOpenCLPreLoadBinary(OCLDeviceContextInterface deviceContext, String deviceInfo) {
=======
@Override
public DeviceBuffer createBuffer(int[] arr) {
return getDeviceContext().getMemoryManager().createDeviceBuffer(arr);
}
private boolean isOpenCLPreLoadBinary(OCLDeviceContext deviceContext, String deviceInfo) {
>>>>>>>
@Override
public DeviceBuffer createBuffer(int[] arr) {
return getDeviceContext().getMemoryManager().createDeviceBuffer(arr);
}
private boolean isOpenCLPreLoadBinary(OCLDeviceContextInterface deviceContext, String deviceInfo) {
<<<<<<<
final ObjectBuffer buffer = createDeviceBuffer(object.getClass(), object, (OCLDeviceContext) getDeviceContext(), batchSize);
=======
final ObjectBuffer buffer = createDeviceBuffer(object.getClass(), object, getDeviceContext(), batchSize);
>>>>>>>
final ObjectBuffer buffer = createDeviceBuffer(object.getClass(), object, (OCLDeviceContext) getDeviceContext(), batchSize); |
<<<<<<<
boolean isCached(String methodName, SchedulableTask task);
=======
int getDeviceIndex();
String getDeviceName();
>>>>>>>
boolean isCached(String methodName, SchedulableTask task);
int getDeviceIndex();
String getDeviceName(); |
<<<<<<<
import org.graalvm.compiler.lir.Variable;
=======
import uk.ac.manchester.tornado.drivers.opencl.graal.OCLArchitecture;
>>>>>>>
import org.graalvm.compiler.lir.Variable;
import uk.ac.manchester.tornado.drivers.opencl.graal.OCLArchitecture; |
<<<<<<<
public void enableThreadSharing() {
TornadoInternalError.unimplemented();
}
@Override
=======
public int[] checkAtomicsForTask(SchedulableTask task) {
return null;
}
@Override
>>>>>>>
public int[] checkAtomicsForTask(SchedulableTask task) {
return null;
}
@Override
public void enableThreadSharing() {
TornadoInternalError.unimplemented();
}
@Override |
<<<<<<<
long copyInValue = timeProfiler.getTimer(ProfilerType.COPY_IN_TIME);
copyInValue += event.getExecutionTime();
timeProfiler.setTimer(ProfilerType.COPY_IN_TIME, copyInValue);
long dispatchValue = timeProfiler.getTimer(ProfilerType.DISPATCH_TIME);
dispatchValue += event.getDriverDispatchTime();
timeProfiler.setTimer(ProfilerType.DISPATCH_TIME, dispatchValue);
=======
long value = timeProfiler.getTimer(ProfilerType.COPY_IN_TIME);
value += event.getExecutionTime();
timeProfiler.setTimer(ProfilerType.COPY_IN_TIME, value);
timeProfiler.addValueToMetric(ProfilerType.TASK_COPY_IN_SIZE_BYTES, tasks.get(contextIndex).getId(), objectState.getBuffer().size());
>>>>>>>
long copyInValue = timeProfiler.getTimer(ProfilerType.COPY_IN_TIME);
copyInValue += event.getExecutionTime();
timeProfiler.setTimer(ProfilerType.COPY_IN_TIME, copyInValue);
timeProfiler.addValueToMetric(ProfilerType.TASK_COPY_IN_SIZE_BYTES, tasks.get(contextIndex).getId(), objectState.getBuffer().size());
long dispatchValue = timeProfiler.getTimer(ProfilerType.DISPATCH_TIME);
dispatchValue += event.getDriverDispatchTime();
timeProfiler.setTimer(ProfilerType.DISPATCH_TIME, dispatchValue);
<<<<<<<
long copyInValue = timeProfiler.getTimer(ProfilerType.COPY_IN_TIME);
copyInValue += event.getExecutionTime();
timeProfiler.setTimer(ProfilerType.COPY_IN_TIME, copyInValue);
long dispatchValue = timeProfiler.getTimer(ProfilerType.DISPATCH_TIME);
dispatchValue += event.getDriverDispatchTime();
timeProfiler.setTimer(ProfilerType.DISPATCH_TIME, dispatchValue);
=======
long value = timeProfiler.getTimer(ProfilerType.COPY_IN_TIME);
value += event.getExecutionTime();
timeProfiler.setTimer(ProfilerType.COPY_IN_TIME, value);
timeProfiler.addValueToMetric(ProfilerType.TASK_COPY_IN_SIZE_BYTES, tasks.get(contextIndex).getId(), objectState.getBuffer().size());
>>>>>>>
long copyInValue = timeProfiler.getTimer(ProfilerType.COPY_IN_TIME);
copyInValue += event.getExecutionTime();
timeProfiler.setTimer(ProfilerType.COPY_IN_TIME, copyInValue);
timeProfiler.addValueToMetric(ProfilerType.TASK_COPY_IN_SIZE_BYTES, tasks.get(contextIndex).getId(), objectState.getBuffer().size());
long dispatchValue = timeProfiler.getTimer(ProfilerType.DISPATCH_TIME);
dispatchValue += event.getDriverDispatchTime();
timeProfiler.setTimer(ProfilerType.DISPATCH_TIME, dispatchValue); |
<<<<<<<
import jdk.vm.ci.hotspot.HotSpotCallingConventionType;
import jdk.vm.ci.hotspot.HotSpotResolvedJavaField;
import jdk.vm.ci.meta.*;
=======
import static jdk.vm.ci.hotspot.HotSpotJVMCIRuntimeProvider.getArrayBaseOffset;
import static org.graalvm.compiler.nodes.NamedLocationIdentity.ARRAY_LENGTH_LOCATION;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.shouldNotReachHere;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.unimplemented;
import java.util.Iterator;
>>>>>>>
import static org.graalvm.compiler.nodes.NamedLocationIdentity.ARRAY_LENGTH_LOCATION;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.shouldNotReachHere;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.unimplemented;
import java.util.Iterator;
import jdk.vm.ci.hotspot.HotSpotCallingConventionType;
import jdk.vm.ci.hotspot.HotSpotResolvedJavaField;
import jdk.vm.ci.meta.*;
<<<<<<<
import java.util.Iterator;
import static org.graalvm.compiler.nodes.NamedLocationIdentity.ARRAY_LENGTH_LOCATION;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.shouldNotReachHere;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.unimplemented;
=======
>>>>>>>
import java.util.Iterator;
import static org.graalvm.compiler.nodes.NamedLocationIdentity.ARRAY_LENGTH_LOCATION;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.shouldNotReachHere;
import static uk.ac.manchester.tornado.api.exceptions.TornadoInternalError.unimplemented;
<<<<<<<
private void lowerFloatConvertNode(FloatConvertNode floatConvert, LoweringTool tool) {
final StructuredGraph graph = floatConvert.graph();
// TODO should probably create a specific float-convert node?
final CastNode asFloat = graph.addWithoutUnique(new CastNode(floatConvert.stamp(NodeView.DEFAULT), floatConvert.getFloatConvert(), floatConvert.getValue()));
=======
private void lowerFloatConvertNode(FloatConvertNode floatConvert) {
final StructuredGraph graph = floatConvert.graph(); // TODO should probably create a specific float-convert node?
final CastNode asFloat = graph.addWithoutUnique(new CastNode(floatConvert.stamp(), floatConvert.getFloatConvert(), floatConvert.getValue()));
>>>>>>>
private void lowerFloatConvertNode(FloatConvertNode floatConvert) {
final StructuredGraph graph = floatConvert.graph(); // TODO should probably create a specific float-convert node?
final CastNode asFloat = graph.addWithoutUnique(new CastNode(floatConvert.stamp(NodeView.DEFAULT), floatConvert.getFloatConvert(), floatConvert.getValue())); |
<<<<<<<
import uk.ac.manchester.tornado.drivers.common.graal.compiler.DumpLowTierGraph;
=======
import uk.ac.manchester.tornado.api.TornadoDeviceContext;
>>>>>>>
import uk.ac.manchester.tornado.api.TornadoDeviceContext;
import uk.ac.manchester.tornado.drivers.common.graal.compiler.DumpLowTierGraph; |
<<<<<<<
private TornadoCoreRuntime() {
=======
private final OptionValues options;
// @formatter:off
public enum TORNADO_DRIVERS_DESCRIPTION {
OPENCL("implemented"),
PTX("unsupported");
String status;
TORNADO_DRIVERS_DESCRIPTION(String status) {
this.status = status;
}
String getStatus() {
return status;
}
}
// @formatter:on
public TornadoCoreRuntime() {
>>>>>>>
// @formatter:off
public enum TORNADO_DRIVERS_DESCRIPTION {
OPENCL("implemented"),
PTX("unsupported");
String status;
TORNADO_DRIVERS_DESCRIPTION(String status) {
this.status = status;
}
String getStatus() {
return status;
}
}
// @formatter:on
private TornadoCoreRuntime() {
<<<<<<<
=======
EconomicMap<OptionKey<?>, Object> opts = OptionValues.newOptionMap();
opts.putAll(HotSpotGraalOptionValues.HOTSPOT_OPTIONS.getMap());
opts.put(GraalOptions.OmitHotExceptionStacktrace, false);
opts.put(GraalOptions.MatchExpressions, true);
opts.put(GraalOptions.RemoveNeverExecutedCode, false);
opts.put(ConstantLoadOptimization.Options.LIROptConstantLoadOptimization, false);
opts.put(PostAllocationOptimizationStage.Options.LIROptRedundantMoveElimination, false);
options = new OptionValues(opts);
>>>>>>> |
<<<<<<<
final long endSequentialCode = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
=======
final long endSequentialCode = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
>>>>>>>
final long endSequentialCode = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
<<<<<<<
long start = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
performStreamInThread(task, streamInObjects);
=======
long start = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
performStreamInThread(task);
>>>>>>>
long start = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
performStreamInThread(task, streamInObjects);
<<<<<<<
final long end = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
taskScheduleIndex.put(taskScheduleNumber, task);
=======
final long end = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
>>>>>>>
final long end = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
taskScheduleIndex.put(taskScheduleNumber, task);
<<<<<<<
@SuppressWarnings("unused")
private void cloneInputOutputObjects() {
=======
final long startSearchProfiler = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
>>>>>>>
@SuppressWarnings("unused")
private void cloneInputOutputObjects() {
final long startSearchProfiler = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
<<<<<<<
runAllTaskSequentially();
final long endSequentialCode = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
=======
final long endSequentialCode = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
>>>>>>>
runAllTaskSequentially();
final long endSequentialCode = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
<<<<<<<
long start = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
performStreamInThread(task, streamInObjects);
=======
long start = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
performStreamInThread(task);
>>>>>>>
long start = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
performStreamInThread(task, streamInObjects);
<<<<<<<
taskScheduleIndex.put(i, task);
final long end = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
=======
final long end = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis();
>>>>>>>
taskScheduleIndex.put(i, task);
final long end = (TIME_IN_NS) ? System.nanoTime() : System.currentTimeMillis(); |
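The row above repeatedly selects a clock behind a TIME_IN_NS flag. A self-contained sketch of that elapsed-time pattern:

public class ElapsedTimer {
    static final boolean TIME_IN_NS = true;

    public static void main(String[] args) {
        final long start = TIME_IN_NS ? System.nanoTime() : System.currentTimeMillis();
        // ... section under measurement ...
        final long end = TIME_IN_NS ? System.nanoTime() : System.currentTimeMillis();
        System.out.println("elapsed = " + (end - start) + (TIME_IN_NS ? " ns" : " ms"));
    }
}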
<<<<<<<
import uk.ac.manchester.tornado.api.exceptions.TornadoRuntimeException;
import uk.ac.manchester.tornado.runtime.graal.nodes.TornadoReduceAddNode;
import uk.ac.manchester.tornado.runtime.graal.nodes.TornadoReduceMulNode;
import uk.ac.manchester.tornado.runtime.graal.nodes.TornadoReduceSubNode;
=======
import uk.ac.manchester.tornado.api.exceptions.TornadoRuntimeException;
import uk.ac.manchester.tornado.runtime.graal.nodes.OCLReduceAddNode;
import uk.ac.manchester.tornado.runtime.graal.nodes.OCLReduceMulNode;
import uk.ac.manchester.tornado.runtime.graal.nodes.OCLReduceSubNode;
>>>>>>>
import uk.ac.manchester.tornado.api.exceptions.TornadoRuntimeException;
<<<<<<<
private static final String OCL_FP_BINARY_NODE = "OCLFPBinaryIntrinsicNode";
private static final String OCL_INT_BINARY_NODE = "OCLIntBinaryIntrinsicNode";
private static final String PTX_FP_BINARY_NODE = "PTXFPBinaryIntrinsicNode";
private static final String PTX_INT_BINARY_NODE = "PTXIntBinaryIntrinsicNode";
=======
private static final String OCL_FP_BINARY_NODE = "OCLFPBinaryIntrinsicNode";
>>>>>>>
private static final String OCL_FP_BINARY_NODE = "OCLFPBinaryIntrinsicNode";
private static final String OCL_INT_BINARY_NODE = "OCLIntBinaryIntrinsicNode";
private static final String PTX_FP_BINARY_NODE = "PTXFPBinaryIntrinsicNode";
private static final String PTX_INT_BINARY_NODE = "PTXIntBinaryIntrinsicNode";
<<<<<<<
String storeValueName = storeValue.getClass().getName();
if (storeValueName.endsWith(OCL_FP_BINARY_NODE) || storeValueName.endsWith(PTX_FP_BINARY_NODE)) {
=======
if (storeValue.getClass().getName().endsWith(OCL_FP_BINARY_NODE)) {
>>>>>>>
String storeValueName = storeValue.getClass().getName();
if (storeValueName.endsWith(OCL_FP_BINARY_NODE) || storeValueName.endsWith(PTX_FP_BINARY_NODE)) {
<<<<<<<
} else if (storeValueName.endsWith(OCL_FP_BINARY_NODE) || storeValueName.endsWith(PTX_FP_BINARY_NODE)) {
=======
} else if (storeValue.getClass().getName().endsWith(OCL_FP_BINARY_NODE)) {
>>>>>>>
} else if (storeValueName.endsWith(OCL_FP_BINARY_NODE) || storeValueName.endsWith(PTX_FP_BINARY_NODE)) { |
<<<<<<<
void useDefaultThreadScheduler(boolean use);
=======
void updateReference(Object oldRef, Object newRef);
>>>>>>>
void updateReference(Object oldRef, Object newRef);
void useDefaultThreadScheduler(boolean use); |
<<<<<<<
private final long id;
private final List<OCLTargetDevice> devices;
=======
private final long contextID;
private final List<OCLDevice> devices;
>>>>>>>
private final long contextID;
private final List<OCLTargetDevice> devices;
<<<<<<<
private static final int MAX_ALLOCATED_REGIONS = 64;
public OCLContext(OCLPlatform platform, long id, List<OCLTargetDevice> devices) {
=======
public OCLContext(OCLPlatform platform, long id, List<OCLDevice> devices) {
>>>>>>>
public OCLContext(OCLPlatform platform, long id, List<OCLTargetDevice> devices) { |
<<<<<<<
System.out.printf("waitlist:\n");
=======
System.out.printf("waitlist:\n");
>>>>>>>
System.out.printf("waitlist:\n");
<<<<<<<
System.out.printf("[%d] 0x%x - %s 0x%x\n",index,events[value],EVENT_DESCRIPTIONS[descriptors[value]], tags[value]);
=======
System.out.printf("[%d] 0x%x - %s 0x%x\n",index,events[value],EVENT_DESCRIPTIONS[descriptors[value]], tags[value]);
>>>>>>>
System.out.printf("[%d] 0x%x - %s 0x%x\n",index,events[value],EVENT_DESCRIPTIONS[descriptors[value]], tags[value]);
<<<<<<<
flush();
=======
flush();
>>>>>>>
flush();
<<<<<<<
=======
flush();
>>>>>>>
<<<<<<<
flush();
=======
flush();
>>>>>>>
flush();
<<<<<<<
flush();
=======
flush();
>>>>>>>
flush(); |
<<<<<<<
final MultiplexFunctionParameterEditor functionParameterEditor = panel.getFunctionParameterEditor();
final DefaultFunctionParameterEditor activeEditor = functionParameterEditor.getDefaultEditor();
=======
MultiplexFunctionParameterEditor functionParameterEditor = panel.getFunctionParameterEditor();
DefaultFunctionParameterEditor activeEditor = functionParameterEditor.getDefaultEditor();
activeEditor.addParameterUpdateListener(panel.getParameterUpdateHandler());
>>>>>>>
final MultiplexFunctionParameterEditor functionParameterEditor = panel.getFunctionParameterEditor();
final DefaultFunctionParameterEditor activeEditor = functionParameterEditor.getDefaultEditor();
activeEditor.addParameterUpdateListener(panel.getParameterUpdateHandler()); |
<<<<<<<
=======
>>>>>>>
<<<<<<<
=======
>>>>>>> |
<<<<<<<
public static File createTestOutputFile()
{
return createTestOutputFile(null);
}
public static File createTestOutputFile(String name)
{
final File file = new File("test-output");
//noinspection ResultOfMethodCallIgnored
file.mkdir();
if (StringUtils.isEmpty(name, true))
{
return file;
}
return new File(file, name);
}
=======
private static class MultiPageFlowSelector implements PageFlowSelector
{
private Set<Integer> acceptedPage;
private boolean logicalPage;
public MultiPageFlowSelector(final boolean logicalPage, final int... acceptedPage)
{
this.acceptedPage = new HashSet<Integer>();
for (int page : acceptedPage)
{
this.acceptedPage.add(page);
}
this.logicalPage = logicalPage;
}
public MultiPageFlowSelector(final int acceptedPage)
{
this(true, acceptedPage);
}
public boolean isPhysicalPageAccepted(final PhysicalPageKey key)
{
if (key == null)
{
return false;
}
return logicalPage == false && acceptedPage.contains(key.getSequentialPageNumber());
}
public boolean isLogicalPageAccepted(final LogicalPageKey key)
{
if (key == null)
{
return false;
}
return logicalPage && acceptedPage.contains(key.getPosition());
}
}
>>>>>>>
private static class MultiPageFlowSelector implements PageFlowSelector
{
private Set<Integer> acceptedPage;
private boolean logicalPage;
public MultiPageFlowSelector(final boolean logicalPage, final int... acceptedPage)
{
this.acceptedPage = new HashSet<Integer>();
for (int page : acceptedPage)
{
this.acceptedPage.add(page);
}
this.logicalPage = logicalPage;
}
public MultiPageFlowSelector(final int acceptedPage)
{
this(true, acceptedPage);
}
public boolean isPhysicalPageAccepted(final PhysicalPageKey key)
{
if (key == null)
{
return false;
}
return logicalPage == false && acceptedPage.contains(key.getSequentialPageNumber());
}
public boolean isLogicalPageAccepted(final LogicalPageKey key)
{
if (key == null)
{
return false;
}
return logicalPage && acceptedPage.contains(key.getPosition());
}
}
public static File createTestOutputFile()
{
return createTestOutputFile(null);
}
public static File createTestOutputFile(String name)
{
final File file = new File("test-output");
//noinspection ResultOfMethodCallIgnored
file.mkdir();
if (StringUtils.isEmpty(name, true))
{
return file;
}
return new File(file, name);
} |
<<<<<<<
public And<StringSubject> contains(String string) {
if (!getSubject().contains(string)) {
=======
public StringSubject contains(String string) {
if (getSubject() == null) {
if (string != null) {
fail("contains", string);
}
} else if (!getSubject().contains(string)) {
>>>>>>>
public And<StringSubject> contains(String string) {
if (getSubject() == null) {
if (string != null) {
fail("contains", string);
}
} else if (!getSubject().contains(string)) { |