conflict_resolution (string, lengths 27 to 16k) |
---|
<<<<<<<
/**
* This method will return a boolean indicating whether the current user is allowed to register a new
* account or not
* @param context The relevant DSpace context
* @param request The current request
* @return A boolean indicating whether the current user can register a new account or not
* @throws SQLException If something goes wrong
*/
public static boolean authorizeNewAccountRegistration(Context context, HttpServletRequest request)
throws SQLException {
if (DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty("user.registration", true)) {
// This allowSetPassword is currently the only method that would return true only when it's
// actually expected to be returning true.
// For example the LDAP canSelfRegister will return true due to auto-register, while that
// does not imply a new user can register explicitly
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
return false;
}
/**
* This method will return a boolean indicating whether it's allowed to update the password for the EPerson
* with the given email and canLogin property
* @param context The relevant DSpace context
* @param email The email to be checked
* @return A boolean indicating if the password can be updated or not
*/
public static boolean authorizeUpdatePassword(Context context, String email) {
try {
EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, email);
if (eperson != null && eperson.canLogIn()) {
HttpServletRequest request = new DSpace().getRequestService().getCurrentRequest()
.getHttpServletRequest();
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
} catch (SQLException e) {
log.error("Something went wrong trying to retrieve EPerson for email: " + email, e);
}
return false;
}
=======
/**
* This method checks if the community Admin can manage accounts
*
* @return true if able
*/
public static boolean canCommunityAdminManageAccounts() {
boolean isAble = false;
if (AuthorizeConfiguration.canCommunityAdminManagePolicies()
|| AuthorizeConfiguration.canCommunityAdminManageAdminGroup()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionPolicies()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionSubmitters()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionWorkflows()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionAdminGroup()) {
isAble = true;
}
return isAble;
}
/**
* This method checks if the Collection Admin can manage accounts
*
* @return true if able
*/
public static boolean canCollectionAdminManageAccounts() {
boolean isAble = false;
if (AuthorizeConfiguration.canCollectionAdminManagePolicies()
|| AuthorizeConfiguration.canCollectionAdminManageSubmitters()
|| AuthorizeConfiguration.canCollectionAdminManageWorkflows()
|| AuthorizeConfiguration.canCollectionAdminManageAdminGroup()) {
isAble = true;
}
return isAble;
}
>>>>>>>
/**
* This method will return a boolean indicating whether the current user is allowed to register a new
* account or not
* @param context The relevant DSpace context
* @param request The current request
* @return A boolean indicating whether the current user can register a new account or not
* @throws SQLException If something goes wrong
*/
public static boolean authorizeNewAccountRegistration(Context context, HttpServletRequest request)
throws SQLException {
if (DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty("user.registration", true)) {
// This allowSetPassword is currently the only method that would return true only when it's
// actually expected to be returning true.
// For example the LDAP canSelfRegister will return true due to auto-register, while that
// does not imply a new user can register explicitly
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
return false;
}
/**
* This method will return a boolean indicating whether it's allowed to update the password for the EPerson
* with the given email and canLogin property
* @param context The relevant DSpace context
* @param email The email to be checked
* @return A boolean indicating if the password can be updated or not
*/
public static boolean authorizeUpdatePassword(Context context, String email) {
try {
EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, email);
if (eperson != null && eperson.canLogIn()) {
HttpServletRequest request = new DSpace().getRequestService().getCurrentRequest()
.getHttpServletRequest();
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
} catch (SQLException e) {
log.error("Something went wrong trying to retrieve EPerson for email: " + email, e);
}
return false;
}
/**
* This method checks if the community Admin can manage accounts
*
* @return true if able
*/
public static boolean canCommunityAdminManageAccounts() {
boolean isAble = false;
if (AuthorizeConfiguration.canCommunityAdminManagePolicies()
|| AuthorizeConfiguration.canCommunityAdminManageAdminGroup()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionPolicies()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionSubmitters()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionWorkflows()
|| AuthorizeConfiguration.canCommunityAdminManageCollectionAdminGroup()) {
isAble = true;
}
return isAble;
}
/**
* This method checks if the Collection Admin can manage accounts
*
* @return true if able
*/
public static boolean canCollectionAdminManageAccounts() {
boolean isAble = false;
if (AuthorizeConfiguration.canCollectionAdminManagePolicies()
|| AuthorizeConfiguration.canCollectionAdminManageSubmitters()
|| AuthorizeConfiguration.canCollectionAdminManageWorkflows()
|| AuthorizeConfiguration.canCollectionAdminManageAdminGroup()) {
isAble = true;
}
return isAble;
} |
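A minimal usage sketch for the two sets of helpers merged above, assuming it lives in the same utility class as those static methods; the method name and the idea of combining the checks are assumptions, not part of the merged sources.
public static boolean showAccountManagementUi(Context context, HttpServletRequest request)
    throws SQLException {
    // True when self-registration is currently open to this user...
    boolean canRegister = authorizeNewAccountRegistration(context, request);
    // ...or when delegated community/collection admins may manage accounts.
    boolean delegatedAdmins = canCommunityAdminManageAccounts()
        || canCollectionAdminManageAccounts();
    return canRegister || delegatedAdmins;
}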
<<<<<<<
@Override
public WorkspaceItem findDomainObjectByPk(Context context, Integer id) throws SQLException {
return wis.find(context, id);
}
=======
/**
* This is a search method that will return the WorkspaceItemRest object found through the UUID of an item. It'll
* find the Item through the given UUID and try to resolve the WorkspaceItem relevant for that item and return it.
* It'll return a 401/403 if the current user isn't allowed to view the WorkspaceItem.
* It'll return a 204 if nothing was found
* @param itemUuid The UUID for the Item to be used
* @param pageable The pageable if present
* @return The resulting WorkspaceItem object
*/
@SearchRestMethod(name = "item")
public WorkspaceItemRest findByItemUuid(@Parameter(value = "uuid", required = true) UUID itemUuid,
Pageable pageable) {
try {
Context context = obtainContext();
Item item = itemService.find(context, itemUuid);
WorkspaceItem workspaceItem = wis.findByItem(context, item);
if (workspaceItem == null) {
return null;
}
if (!authorizeService.authorizeActionBoolean(context, workspaceItem.getItem(), Constants.READ)) {
throw new AccessDeniedException("The current user does not have rights to view the WorkflowItem");
}
return converter.toRest(workspaceItem, utils.obtainProjection());
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
>>>>>>>
/**
* This is a search method that will return the WorkspaceItemRest object found through the UUID of an item. It'll
* find the Item through the given UUID and try to resolve the WorkspaceItem relevant for that item and return it.
* It'll return a 401/403 if the current user isn't allowed to view the WorkspaceItem.
* It'll return a 204 if nothing was found
* @param itemUuid The UUID for the Item to be used
* @param pageable The pageable if present
* @return The resulting WorkspaceItem object
*/
@SearchRestMethod(name = "item")
public WorkspaceItemRest findByItemUuid(@Parameter(value = "uuid", required = true) UUID itemUuid,
Pageable pageable) {
try {
Context context = obtainContext();
Item item = itemService.find(context, itemUuid);
WorkspaceItem workspaceItem = wis.findByItem(context, item);
if (workspaceItem == null) {
return null;
}
if (!authorizeService.authorizeActionBoolean(context, workspaceItem.getItem(), Constants.READ)) {
throw new AccessDeniedException("The current user does not have rights to view the WorkflowItem");
}
return converter.toRest(workspaceItem, utils.obtainProjection());
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
@Override
public WorkspaceItem findDomainObjectByPk(Context context, Integer id) throws SQLException {
return wis.find(context, id);
} |
<<<<<<<
=======
List<BundleRest> bundles;
List<RelationshipRest> relationships;
>>>>>>>
List<BundleRest> bundles; |
<<<<<<<
public static final int MAIN_PAGE = 0;
public static final int REJECT_PAGE = 1;
public static final int SUBMITTER_IS_DELETED_PAGE = 2;
=======
private static final String SUBMIT_APPROVE = "submit_approve";
private static final String SUBMIT_REJECT = "submit_reject";
>>>>>>>
private static final String SUBMIT_APPROVE = "submit_approve";
private static final String SUBMIT_REJECT = "submit_reject";
private static final String SUBMITTER_IS_DELETED_PAGE = "submitter_deleted";
<<<<<<<
throws SQLException, AuthorizeException, IOException {
int page = Util.getIntParameter(request, "page");
switch (page) {
case MAIN_PAGE:
return processMainPage(c, wfi, step, request);
case REJECT_PAGE:
return processRejectPage(c, wfi, step, request);
case SUBMITTER_IS_DELETED_PAGE:
return processSubmitterIsDeletedPage(c, wfi, request);
default:
return new ActionResult(ActionResult.TYPE.TYPE_CANCEL);
=======
throws SQLException, AuthorizeException, IOException {
if (super.isOptionInParam(request)) {
switch (Util.getSubmitButton(request, SUBMIT_CANCEL)) {
case SUBMIT_APPROVE:
return processAccept(c, wfi);
case SUBMIT_REJECT:
return processRejectPage(c, wfi, request);
default:
return new ActionResult(ActionResult.TYPE.TYPE_CANCEL);
}
>>>>>>>
throws SQLException, AuthorizeException, IOException {
if (super.isOptionInParam(request)) {
switch (Util.getSubmitButton(request, SUBMIT_CANCEL)) {
case SUBMIT_APPROVE:
return processAccept(c, wfi);
case SUBMIT_REJECT:
return processRejectPage(c, wfi, request);
case SUBMITTER_IS_DELETED_PAGE:
return processSubmitterIsDeletedPage(c, wfi, request);
default:
return new ActionResult(ActionResult.TYPE.TYPE_CANCEL);
}
<<<<<<<
public ActionResult processMainPage(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request)
throws SQLException, AuthorizeException {
if (request.getParameter("submit_approve") != null) {
//Delete the tasks
addApprovedProvenance(c, wfi);
return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE);
} else if (request.getParameter("submit_reject") != null) {
// Make sure we indicate which page we want to process
if (wfi.getSubmitter() == null) {
request.setAttribute("page", SUBMITTER_IS_DELETED_PAGE);
} else {
request.setAttribute("page", REJECT_PAGE);
}
// We have pressed reject item, so take the user to a page where he can reject
return new ActionResult(ActionResult.TYPE.TYPE_PAGE);
} else {
//We pressed the leave button so return to our submissions page
return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE);
}
=======
@Override
public List<String> getOptions() {
List<String> options = new ArrayList<>();
options.add(SUBMIT_APPROVE);
options.add(SUBMIT_REJECT);
options.add(ProcessingAction.SUBMIT_EDIT_METADATA);
return options;
>>>>>>>
@Override
public List<String> getOptions() {
List<String> options = new ArrayList<>();
options.add(SUBMIT_APPROVE);
options.add(SUBMIT_REJECT);
options.add(ProcessingAction.SUBMIT_EDIT_METADATA);
return options; |
<<<<<<<
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.allOf;
=======
import static org.hamcrest.Matchers.hasSize;
>>>>>>>
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.hasSize; |
<<<<<<<
=======
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
>>>>>>>
import com.google.common.collect.ImmutableList; |
<<<<<<<
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.entities.Project;
=======
import com.google.gerrit.exceptions.StorageException;
>>>>>>>
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.entities.Project;
import com.google.gerrit.exceptions.StorageException; |
<<<<<<<
=======
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
>>>>>>>
import org.dspace.app.rest.projection.Projection;
<<<<<<<
GroupRestRepository(GroupService dsoService,
GroupConverter dsoConverter) {
super(dsoService, dsoConverter);
=======
GroupRestRepository(GroupService dsoService) {
super(dsoService, new DSpaceObjectPatch<GroupRest>() {});
>>>>>>>
GroupRestRepository(GroupService dsoService) {
super(dsoService); |
<<<<<<<
/**
* This method will update the place for the Relationship and all other relationships found by the items and
* relationship type of the given Relationship. It will give this Relationship the last place in both the
* left and right place determined by querying for the list of leftRelationships and rightRelationships
* by the leftItem, rightItem and relationshipType of the given Relationship.
* @param context The relevant DSpace context
* @param relationship The Relationship object that will have its place updated and that will be used
* to retrieve the other relationships whose place might need to be updated
* @throws SQLException If something goes wrong
*/
public void updatePlaceInRelationship(Context context, Relationship relationship) throws SQLException;
=======
/**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @return The list of Relationship objects for which the given RelationshipType object is equal
* to the relationshipType property
* @throws SQLException If something goes wrong
*/
List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType) throws SQLException;
>>>>>>>
/**
* This method will update the place for the Relationship and all other relationships found by the items and
* relationship type of the given Relationship. It will give this Relationship the last place in both the
* left and right place determined by querying for the list of leftRelationships and rightRelationships
* by the leftItem, rightItem and relationshipType of the given Relationship.
* @param context The relevant DSpace context
* @param relationship The Relationship object that will have its place updated and that will be used
* to retrieve the other relationships whose place might need to be updated
* @throws SQLException If something goes wrong
*/
public void updatePlaceInRelationship(Context context, Relationship relationship) throws SQLException;
/**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @return The list of Relationship objects for which the given RelationshipType object is equal
* to the relationshipType property
* @throws SQLException If something goes wrong
*/
List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType) throws SQLException; |
<<<<<<<
* Internal method to execute POST with text/uri-list MediaType;
*
* @param request The relevant request
* @param apiCategory The apiCategory to be used
* @param model The model to be used
* @return The relevant ResponseEntity for this request
* @throws HttpRequestMethodNotSupportedException If something goes wrong
*/
public <ID extends Serializable> ResponseEntity<ResourceSupport> postUriListInternal(HttpServletRequest request,
String apiCategory,
String model)
throws HttpRequestMethodNotSupportedException {
checkModelPluralForm(apiCategory, model);
DSpaceRestRepository<RestAddressableModel, ID> repository = utils.getResourceRepository(apiCategory, model);
RestAddressableModel modelObject = null;
List<DSpaceObject> dSpaceObjectList = utils.getdSpaceObjectsFromRequest(request);
try {
modelObject = repository.createAndReturn(dSpaceObjectList);
} catch (ClassCastException e) {
log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory +
" and model: " + model, e);
return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR);
}
if (modelObject == null) {
throw new HttpRequestMethodNotSupportedException(RequestMethod.POST.toString());
}
DSpaceResource result = repository.wrapResource(modelObject);
linkService.addLinks(result);
//TODO manage HTTPHeader
return ControllerUtils.toResponseEntity(HttpStatus.CREATED, null, result);
}
/**
=======
* Called in POST, with a x-www-form-urlencoded, execute an action on a resource
*
* Note that the regular expression in the request mapping accepts a number as identifier;
*
* @param request
* @param apiCategory
* @param model
* @param id
* @return
* @throws HttpRequestMethodNotSupportedException
* @throws IOException
* @throws SQLException
*/
@RequestMapping(method = RequestMethod.POST, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT, headers =
"content-type=application/x-www-form-urlencoded")
public ResponseEntity<ResourceSupport> action(HttpServletRequest request, @PathVariable String apiCategory,
@PathVariable String model, @PathVariable Integer id)
throws HttpRequestMethodNotSupportedException, SQLException, IOException {
checkModelPluralForm(apiCategory, model);
DSpaceRestRepository<RestAddressableModel, Integer> repository =
utils.getResourceRepository(apiCategory, model);
RestAddressableModel modelObject = null;
try {
modelObject = repository.action(request, id);
} catch (UnprocessableEntityException e) {
log.error(e.getMessage(), e);
return ControllerUtils.toEmptyResponse(HttpStatus.UNPROCESSABLE_ENTITY);
}
if (modelObject != null) {
DSpaceResource result = repository.wrapResource(modelObject);
linkService.addLinks(result);
return ControllerUtils.toResponseEntity(HttpStatus.CREATED, null, result);
} else {
return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
}
}
/**
>>>>>>>
* Internal method to execute POST with text/uri-list MediaType;
*
* @param request The relevant request
* @param apiCategory The apiCategory to be used
* @param model The model to be used
* @return The relevant ResponseEntity for this request
* @throws HttpRequestMethodNotSupportedException If something goes wrong
*/
public <ID extends Serializable> ResponseEntity<ResourceSupport> postUriListInternal(HttpServletRequest request,
String apiCategory,
String model)
throws HttpRequestMethodNotSupportedException {
checkModelPluralForm(apiCategory, model);
DSpaceRestRepository<RestAddressableModel, ID> repository = utils.getResourceRepository(apiCategory, model);
RestAddressableModel modelObject = null;
List<DSpaceObject> dSpaceObjectList = utils.getdSpaceObjectsFromRequest(request);
try {
modelObject = repository.createAndReturn(dSpaceObjectList);
} catch (ClassCastException e) {
log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory +
" and model: " + model, e);
return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR);
}
if (modelObject == null) {
throw new HttpRequestMethodNotSupportedException(RequestMethod.POST.toString());
}
DSpaceResource result = repository.wrapResource(modelObject);
linkService.addLinks(result);
//TODO manage HTTPHeader
return ControllerUtils.toResponseEntity(HttpStatus.CREATED, null, result);
}
/**
* Called in POST, with a x-www-form-urlencoded, execute an action on a resource
*
* Note that the regular expression in the request mapping accepts a number as identifier;
*
* @param request
* @param apiCategory
* @param model
* @param id
* @return
* @throws HttpRequestMethodNotSupportedException
* @throws IOException
* @throws SQLException
*/
@RequestMapping(method = RequestMethod.POST, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT, headers =
"content-type=application/x-www-form-urlencoded")
public ResponseEntity<ResourceSupport> action(HttpServletRequest request, @PathVariable String apiCategory,
@PathVariable String model, @PathVariable Integer id)
throws HttpRequestMethodNotSupportedException, SQLException, IOException {
checkModelPluralForm(apiCategory, model);
DSpaceRestRepository<RestAddressableModel, Integer> repository =
utils.getResourceRepository(apiCategory, model);
RestAddressableModel modelObject = null;
try {
modelObject = repository.action(request, id);
} catch (UnprocessableEntityException e) {
log.error(e.getMessage(), e);
return ControllerUtils.toEmptyResponse(HttpStatus.UNPROCESSABLE_ENTITY);
}
if (modelObject != null) {
DSpaceResource result = repository.wrapResource(modelObject);
linkService.addLinks(result);
return ControllerUtils.toResponseEntity(HttpStatus.CREATED, null, result);
} else {
return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
}
}
/** |
<<<<<<<
super(cfg, sitePaths, schema, clientBuilder, ACCOUNTS_PREFIX);
=======
// No parts of FillArgs are currently required, just use null.
super(cfg, null, sitePaths, schema, clientBuilder, ACCOUNTS);
>>>>>>>
super(cfg, sitePaths, schema, clientBuilder, ACCOUNTS); |
<<<<<<<
ProcessRest processRest = scriptRestRepository.startProcess(scriptName, files);
ProcessResource processResource = new ProcessResource(processRest, utils, null);
halLinkService.addLinks(processResource);
=======
ProcessRest processRest = scriptRestRepository.startProcess(scriptName);
ProcessResource processResource = converter.toResource(processRest);
>>>>>>>
ProcessRest processRest = scriptRestRepository.startProcess(scriptName, files);
ProcessResource processResource = converter.toResource(processRest); |
<<<<<<<
import com.google.gerrit.reviewdb.client.Account;
=======
import com.google.gerrit.reviewdb.client.Account.Id;
import com.google.gerrit.testing.ConfigSuite;
import org.eclipse.jgit.lib.Config;
>>>>>>>
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.testing.ConfigSuite;
import org.eclipse.jgit.lib.Config;
<<<<<<<
adminSshSession.exec(
String.format("gerrit set-reviewers -%s %s %s", add ? "a" : "r", user.email(), id));
adminSshSession.assertSuccess();
ImmutableSet<Account.Id> reviewers = change.getChange().getReviewers().all();
=======
session.exec(String.format("gerrit set-reviewers -%s %s %s", add ? "a" : "r", user.email, id));
session.assertSuccess();
ImmutableSet<Id> reviewers = change.getChange().getReviewers().all();
>>>>>>>
session.exec(
String.format("gerrit set-reviewers -%s %s %s", add ? "a" : "r", user.email(), id));
session.assertSuccess();
ImmutableSet<Account.Id> reviewers = change.getChange().getReviewers().all(); |
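For illustration (the concrete values are made up): with add == true, user.email() returning "jdoe@example.com" and id being "Ic0ffee1", the String.format call above yields the command "gerrit set-reviewers -a jdoe@example.com Ic0ffee1"; with add == false the flag becomes -r.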
<<<<<<<
/**
* Modify an Item which is a template Item
* @param item The Item to be modified
* @param jsonNode The patch to be applied
* @return The Item as it is after applying the patch
* @throws SQLException
* @throws AuthorizeException
*/
public ItemRest patchTemplateItem(Item item, JsonNode jsonNode)
throws SQLException, AuthorizeException {
ObjectMapper mapper = new ObjectMapper();
JsonPatchConverter patchConverter = new JsonPatchConverter(mapper);
Patch patch = patchConverter.convert(jsonNode);
patchDSpaceObject(ItemRest.CATEGORY, ItemRest.NAME, item.getID(), patch);
return converter.toRest(item, Projection.DEFAULT);
}
/**
* Remove an Item which is a template for a Collection.
*
* Note: The caller is responsible for checking that this item is in fact a template item.
*
* @param context
* @param item The item to be removed
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
*/
public void removeTemplateItem(Context context, Item item) throws SQLException, IOException, AuthorizeException {
Collection collection = item.getTemplateItemOf();
collectionService.removeTemplateItem(context, collection);
collectionService.update(context, collection);
}
=======
>>>>>>> |
<<<<<<<
import org.dspace.app.rest.builder.ItemBuilder;
=======
import org.dspace.app.rest.builder.GroupBuilder;
>>>>>>>
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder; |
<<<<<<<
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.services.factory.DSpaceServicesFactory;
=======
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
>>>>>>>
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
<<<<<<<
/**
* This method will return a boolean indicating whether the current user is allowed to register a new
* account or not
* @param context The relevant DSpace context
* @param request The current request
* @return A boolean indicating whether the current user can register a new account or not
* @throws SQLException If something goes wrong
*/
public static boolean authorizeNewAccountRegistration(Context context, HttpServletRequest request)
throws SQLException {
if (DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty("user.registration", true)) {
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
return false;
}
/**
* This method will return a boolean indicating whether it's allowed to update the password for the EPerson
* with the given email and canLogin property
* @param context The relevant DSpace context
* @param email The email to be checked
* @return A boolean indicating if the password can be updated or not
*/
public static boolean authorizeUpdatePassword(Context context, String email) {
try {
EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, email);
if (eperson != null && eperson.canLogIn()) {
return true;
}
} catch (SQLException e) {
log.error("Something went wrong trying to retrieve EPerson for email: " + email, e);
}
return false;
}
=======
/**
* This method will check whether the current user is authorized to manage the default read group
* @param context The relevant DSpace context
* @param collection The collection for which this will be checked
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
public static void authorizeManageDefaultReadGroup(Context context,
Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
authorizeService.authorizeAction(context, collection, Constants.ADMIN);
}
/**
* This method checks whether the current user has sufficient rights to modify the group.
* Depending on the kind of group and due to delegated administration, separate checks need to be done to verify
* whether the user is allowed to modify the group.
*
* @param context the context of which the user will be checked
* @param group the group to be checked
* @throws SQLException
* @throws AuthorizeException
*/
public static void authorizeManageGroup(Context context, Group group) throws SQLException, AuthorizeException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
CollectionRoleService collectionRoleService = XmlWorkflowServiceFactory.getInstance()
.getCollectionRoleService();
if (authorizeService.isAdmin(context)) {
return;
}
DSpaceObject parentObject = groupService.getParentObject(context, group);
if (parentObject == null) {
throw new AuthorizeException("not authorized to manage this group");
}
if (parentObject.getType() == Constants.COLLECTION) {
Collection collection = (Collection) parentObject;
if (group.equals(collection.getSubmitters())) {
authorizeManageSubmittersGroup(context, collection);
return;
}
List<CollectionRole> collectionRoles = collectionRoleService.findByCollection(context, collection);
for (CollectionRole role : collectionRoles) {
if (group.equals(role.getGroup())) {
authorizeManageWorkflowsGroup(context, collection);
return;
}
}
if (group.equals(collection.getAdministrators())) {
authorizeManageAdminGroup(context, collection);
return;
}
}
if (parentObject.getType() == Constants.COMMUNITY) {
Community community = (Community) parentObject;
authorizeManageAdminGroup(context, community);
return;
}
throw new AuthorizeException("not authorized to manage this group");
}
>>>>>>>
/**
* This method will check whether the current user is authorized to manage the default read group
* @param context The relevant DSpace context
* @param collection The collection for which this will be checked
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
public static void authorizeManageDefaultReadGroup(Context context,
Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
authorizeService.authorizeAction(context, collection, Constants.ADMIN);
}
/**
* This method checks whether the current user has sufficient rights to modify the group.
* Depending on the kind of group and due to delegated administration, separate checks need to be done to verify
* whether the user is allowed to modify the group.
*
* @param context the context of which the user will be checked
* @param group the group to be checked
* @throws SQLException
* @throws AuthorizeException
*/
public static void authorizeManageGroup(Context context, Group group) throws SQLException, AuthorizeException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
CollectionRoleService collectionRoleService = XmlWorkflowServiceFactory.getInstance()
.getCollectionRoleService();
if (authorizeService.isAdmin(context)) {
return;
}
DSpaceObject parentObject = groupService.getParentObject(context, group);
if (parentObject == null) {
throw new AuthorizeException("not authorized to manage this group");
}
if (parentObject.getType() == Constants.COLLECTION) {
Collection collection = (Collection) parentObject;
if (group.equals(collection.getSubmitters())) {
authorizeManageSubmittersGroup(context, collection);
return;
}
List<CollectionRole> collectionRoles = collectionRoleService.findByCollection(context, collection);
for (CollectionRole role : collectionRoles) {
if (group.equals(role.getGroup())) {
authorizeManageWorkflowsGroup(context, collection);
return;
}
}
if (group.equals(collection.getAdministrators())) {
authorizeManageAdminGroup(context, collection);
return;
}
}
if (parentObject.getType() == Constants.COMMUNITY) {
Community community = (Community) parentObject;
authorizeManageAdminGroup(context, community);
return;
}
throw new AuthorizeException("not authorized to manage this group");
}
/**
* This method will return a boolean indicating whether the current user is allowed to register a new
* account or not
* @param context The relevant DSpace context
* @param request The current request
* @return A boolean indicating whether the current user can register a new account or not
* @throws SQLException If something goes wrong
*/
public static boolean authorizeNewAccountRegistration(Context context, HttpServletRequest request)
throws SQLException {
if (DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty("user.registration", true)) {
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
return false;
}
/**
* This method will return a boolean indicating whether it's allowed to update the password for the EPerson
* with the given email and canLogin property
* @param context The relevant DSpace context
* @param email The email to be checked
* @return A boolean indicating if the password can be updated or not
*/
public static boolean authorizeUpdatePassword(Context context, String email) {
try {
EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, email);
if (eperson != null && eperson.canLogIn()) {
return true;
}
} catch (SQLException e) {
log.error("Something went wrong trying to retrieve EPerson for email: " + email, e);
}
return false;
} |
<<<<<<<
=======
import org.dspace.app.rest.model.DirectlyAddressableRestModel;
>>>>>>> |
<<<<<<<
import com.google.common.collect.Iterables;
=======
import com.google.common.collect.Lists;
>>>>>>>
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
<<<<<<<
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
=======
import com.google.gerrit.reviewdb.client.PatchSetApproval;
>>>>>>>
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
<<<<<<<
import com.google.gwtorm.server.OrmException;
=======
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
>>>>>>>
import com.google.gwtorm.server.OrmException; |
<<<<<<<
public DiscoverQuery prepareQuery(DSpaceObject scope) throws UIException, SearchServiceException {
this.queryArgs = new DiscoverQuery();
// possibly an argument
int page = getParameterPage();
String query = getQuery();
List<String> filterQueries = new ArrayList<String>();
=======
public void performSearch(DSpaceObject scope) throws UIException, SearchServiceException {
if (queryResults != null)
{
return;
}
String query = getQuery();
// Escape any special characters in this user-entered query
query = DiscoveryUIUtils.escapeQueryChars(query);
int page = getParameterPage();
List<String> filterQueries = new ArrayList<String>();
>>>>>>>
public DiscoverQuery prepareQuery(DSpaceObject scope) throws UIException, SearchServiceException {
this.queryArgs = new DiscoverQuery();
// possibly an argument
int page = getParameterPage();
String query = getQuery();
// Escape any special characters in this user-entered query
query = DiscoveryUIUtils.escapeQueryChars(query);
List<String> filterQueries = new ArrayList<String>(); |
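A short illustration of the escaping step kept in the resolved version (the input value is made up and the exact escaped form depends on DiscoveryUIUtils):
String raw = "title:(foo AND bar)";
// escapeQueryChars prevents user input from being parsed as query syntax,
// so the entered text is matched literally rather than as operators.
String safe = DiscoveryUIUtils.escapeQueryChars(raw);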
<<<<<<<
import org.dspace.app.rest.converter.BitstreamConverter;
import org.dspace.app.rest.converter.CollectionConverter;
import org.dspace.app.rest.converter.ItemConverter;
import org.dspace.app.rest.converter.MetadataConverter;
=======
>>>>>>>
import org.dspace.app.rest.converter.BitstreamConverter;
import org.dspace.app.rest.converter.CollectionConverter;
import org.dspace.app.rest.converter.ItemConverter;
import org.dspace.app.rest.converter.MetadataConverter;
<<<<<<<
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.hateoas.CollectionResource;
=======
>>>>>>>
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.hateoas.CollectionResource;
<<<<<<<
@Autowired
private ItemConverter itemConverter;
@Autowired
private ItemService itemService;
public CollectionRestRepository(CollectionService dsoService,
CollectionConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<CollectionRest>() {});
=======
public CollectionRestRepository(CollectionService dsoService) {
super(dsoService, new DSpaceObjectPatch<CollectionRest>() {});
this.cs = dsoService;
>>>>>>>
@Autowired
private ItemConverter itemConverter;
@Autowired
private ItemService itemService;
public CollectionRestRepository(CollectionService dsoService) {
super(dsoService, new DSpaceObjectPatch<CollectionRest>() {}); |
<<<<<<<
import org.dspace.util.SolrUtils;
=======
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
import org.dspace.xmlworkflow.storedcomponents.PoolTask;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
>>>>>>>
import org.dspace.util.SolrUtils;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
import org.dspace.xmlworkflow.storedcomponents.PoolTask;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService; |
<<<<<<<
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
=======
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
>>>>>>>
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
<<<<<<<
public List<RelationshipMetadataValue> getRelationshipMetadata(Item item, boolean extra) {
=======
public List<MetadataValue> getRelationshipMetadata(Item item, boolean enableVirtualMetadata) {
>>>>>>>
public List<RelationshipMetadataValue> getRelationshipMetadata(Item item, boolean extra) {
<<<<<<<
fullMetadataValueList
.addAll(handleItemRelationship(context, item, entityType, relationship, extra));
=======
fullMetadataValueList.addAll(handleItemRelationship(context, item, entityType,
relationship, enableVirtualMetadata));
>>>>>>>
fullMetadataValueList
.addAll(handleItemRelationship(context, item, entityType, relationship, extra));
<<<<<<<
private List<RelationshipMetadataValue> handleItemRelationship(Context context, Item item, String entityType,
Relationship relationship, boolean extra)
throws SQLException {
List<RelationshipMetadataValue> resultingMetadataValueList = new LinkedList<>();
=======
private List<MetadataValue> handleItemRelationship(Context context, Item item, String entityType,
Relationship relationship, boolean enableVirtualMetadata)
throws SQLException {
List<MetadataValue> resultingMetadataValueList = new LinkedList<>();
>>>>>>>
private List<RelationshipMetadataValue> handleItemRelationship(Context context, Item item, String entityType,
Relationship relationship,
boolean enableVirtualMetadata)
throws SQLException {
List<RelationshipMetadataValue> resultingMetadataValueList = new LinkedList<>();
<<<<<<<
RelationshipMetadataValue metadataValue = constructMetadataValue(context, key);
metadataValue = constructResultingMetadataValue(context, item, otherItem, virtualBean, metadataValue,
relationshipId);
metadataValue.setUseForPlace(virtualBean.getUseForPlace());
metadataValue.setPlace(place);
if (StringUtils.isNotBlank(metadataValue.getValue())) {
resultingMetadataValueList.add(metadataValue);
=======
for (String value : virtualBean.getValues(context, otherItem)) {
MetadataValue metadataValue = constructMetadataValue(key);
metadataValue = constructResultingMetadataValue(item, value, metadataValue);
if (StringUtils.isNotBlank(metadataValue.getValue())) {
resultingMetadataValueList.add(metadataValue);
}
>>>>>>>
for (String value : virtualBean.getValues(context, otherItem)) {
RelationshipMetadataValue metadataValue = constructMetadataValue(context, key);
metadataValue = constructResultingMetadataValue(item, value, metadataValue, relationshipId);
metadataValue.setUseForPlace(virtualBean.getUseForPlace());
metadataValue.setPlace(place);
if (StringUtils.isNotBlank(metadataValue.getValue())) {
resultingMetadataValueList.add(metadataValue);
}
<<<<<<<
private RelationshipMetadataValue getRelationMetadataFromOtherItem(Context context, Item otherItem,
String relationName,
Integer relationshipId) {
RelationshipMetadataValue metadataValue = constructMetadataValue(context, "relation." + relationName);
metadataValue.setAuthority("virtual::" + relationshipId);
=======
private MetadataValue getRelationMetadataFromOtherItem(Item otherItem, String relationName) {
MetadataValue metadataValue = constructMetadataValue(
MetadataSchemaEnum.RELATION.getName() + "." + relationName);
metadataValue.setAuthority("virtual");
>>>>>>>
private RelationshipMetadataValue getRelationMetadataFromOtherItem(Context context, Item otherItem,
String relationName,
Integer relationshipId) {
RelationshipMetadataValue metadataValue = constructMetadataValue(context,
MetadataSchemaEnum.RELATION
.getName() + "." + relationName);
metadataValue.setAuthority("virtual::" + relationshipId);
<<<<<<<
private RelationshipMetadataValue constructResultingMetadataValue(Context context, Item item, Item otherItem,
VirtualBean virtualBean,
RelationshipMetadataValue metadataValue,
Integer relationshipId)
throws SQLException {
metadataValue.setValue(virtualBean.getValue(context, otherItem));
metadataValue.setAuthority("virtual::" + relationshipId);
=======
private MetadataValue constructResultingMetadataValue(Item item, String value, MetadataValue metadataValue) {
metadataValue.setValue(value);
metadataValue.setAuthority("virtual");
>>>>>>>
private RelationshipMetadataValue constructResultingMetadataValue(Item item, String value,
RelationshipMetadataValue metadataValue,
Integer relationshipId) {
metadataValue.setValue(value);
metadataValue.setAuthority("virtual::" + relationshipId); |
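For example (the id is chosen for illustration): with relationshipId equal to 42, the resolved code stores the authority as "virtual::42", so the originating relationship can be recovered from the metadata value, whereas the discarded branch stored only the constant "virtual".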
<<<<<<<
=======
import org.dspace.app.rest.converter.BitstreamConverter;
import org.dspace.app.rest.converter.CollectionConverter;
import org.dspace.app.rest.converter.MetadataConverter;
>>>>>>>
<<<<<<<
=======
CollectionConverter converter;
@Autowired
BitstreamConverter bitstreamConverter;
@Autowired
MetadataConverter metadataConverter;
@Autowired
>>>>>>>
<<<<<<<
public CollectionRestRepository(CollectionService dsoService) {
super(dsoService, new DSpaceObjectPatch<CollectionRest>() {});
this.cs = dsoService;
=======
@Autowired
private CollectionService cs;
@Autowired
private BitstreamService bitstreamService;
public CollectionRestRepository(CollectionService dsoService,
CollectionConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<CollectionRest>() {});
>>>>>>>
@Autowired
private CollectionService cs;
@Autowired
private BitstreamService bitstreamService;
public CollectionRestRepository(CollectionService dsoService) {
super(dsoService, new DSpaceObjectPatch<CollectionRest>() {});
this.cs = dsoService; |
<<<<<<<
import org.dspace.content.virtual.VirtualMetadataPopulator;
=======
import org.dspace.core.Constants;
>>>>>>>
import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants; |
<<<<<<<
.andExpect(jsonPath("$._links.submissiondefinitions.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.submissionforms.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.submissionsections.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.submissionuploads.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.workspaceitems.href", startsWith(REST_SERVER_URL)))
=======
.andExpect(jsonPath("$._links.authn.href", startsWith(REST_SERVER_URL)))
>>>>>>>
.andExpect(jsonPath("$._links.submissiondefinitions.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.submissionforms.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.submissionsections.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.submissionuploads.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.workspaceitems.href", startsWith(REST_SERVER_URL)))
.andExpect(jsonPath("$._links.authn.href", startsWith(REST_SERVER_URL))) |
<<<<<<<
=======
import org.dspace.app.rest.repository.handler.service.UriListHandlerService;
import org.dspace.app.rest.repository.patch.ItemPatch;
>>>>>>>
import org.dspace.app.rest.repository.handler.service.UriListHandlerService;
<<<<<<<
public ItemRestRepository(ItemService dsoService) {
super(dsoService);
=======
@Autowired
private UriListHandlerService uriListHandlerService;
public ItemRestRepository(ItemService dsoService, ItemPatch dsoPatch) {
super(dsoService, dsoPatch);
>>>>>>>
@Autowired
private UriListHandlerService uriListHandlerService;
public ItemRestRepository(ItemService dsoService) {
super(dsoService);
<<<<<<<
=======
protected void updateDSpaceObject(Item item, ItemRest itemRest)
throws AuthorizeException, SQLException {
super.updateDSpaceObject(item, itemRest);
Context context = obtainContext();
if (itemRest.getWithdrawn() != item.isWithdrawn()) {
if (itemRest.getWithdrawn()) {
itemService.withdraw(context, item);
} else {
itemService.reinstate(context, item);
}
}
if (itemRest.getDiscoverable() != item.isDiscoverable()) {
item.setDiscoverable(itemRest.getDiscoverable());
itemService.update(context, item);
}
}
@Override
>>>>>>> |
<<<<<<<
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.BrowsableDSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants;
=======
import org.apache.commons.lang3.builder.HashCodeBuilder;
>>>>>>>
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.BrowsableDSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants; |
<<<<<<<
total = gs.countTotal(context);
groups = gs.findAll(context, null, pageable.getPageSize(), Math.toIntExact(pageable.getOffset()));
=======
long total = gs.countTotal(context);
List<Group> groups = gs.findAll(context, null, pageable.getPageSize(), pageable.getOffset());
return converter.toRestPage(groups, pageable, total, utils.obtainProjection(true));
>>>>>>>
long total = gs.countTotal(context);
List<Group> groups = gs.findAll(context, null, pageable.getPageSize(), Math.toIntExact(pageable.getOffset()));
return converter.toRestPage(groups, pageable, total, utils.obtainProjection(true)); |
<<<<<<<
if (projectNameCandidate.equals(reposToBeFound.get(id))) {
expect(projectCache.get(new Project.NameKey(projectNameCandidate)))
.andReturn(createNiceMock(ProjectState.class));
break;
=======
if (reposToBeFound.containsValue(projectNameCandidate)) {
expect(repoManager.list()).andReturn(
new TreeSet<>(Collections.singletonList(
new Project.NameKey(projectNameCandidate))));
>>>>>>>
if (reposToBeFound.containsValue(projectNameCandidate)) {
expect(projectCache.get(new Project.NameKey(projectNameCandidate)))
.andReturn(createNiceMock(ProjectState.class)); |
<<<<<<<
import java.io.IOException;
import java.util.List;
=======
>>>>>>>
import java.util.List;
<<<<<<<
public final String LEGACY_WORKFLOW_NAME = "default";
public Workflow getWorkflow(Collection collection) throws WorkflowConfigurationException;
public Workflow getWorkflowByName(String workflowName) throws WorkflowConfigurationException;
public List<Workflow> getAllConfiguredWorkflows() throws WorkflowConfigurationException;
public boolean workflowByThisNameExists(String workflowName) throws WorkflowConfigurationException;
public boolean isDefaultWorkflow(String workflowName) throws WorkflowConfigurationException;
public Workflow getDefaultWorkflow() throws WorkflowConfigurationException;
public List<String> getCollectionHandlesMappedToWorklow(String workflowName) throws WorkflowConfigurationException;
public Step createStep(Workflow workflow, String stepID) throws WorkflowConfigurationException, IOException;
=======
>>>>>>>
public final String LEGACY_WORKFLOW_NAME = "default"; |
<<<<<<<
import org.dspace.services.model.Request;
import org.dspace.submit.AbstractProcessingStep;
=======
import org.dspace.eperson.EPerson;
import org.dspace.eperson.EPersonServiceImpl;
>>>>>>>
import org.dspace.eperson.EPerson;
import org.dspace.eperson.EPersonServiceImpl;
import org.dspace.services.model.Request;
import org.dspace.submit.AbstractProcessingStep;
<<<<<<<
@Autowired
SubmissionService submissionService;
private SubmissionConfigReader submissionConfigReader;
=======
@Autowired
EPersonServiceImpl epersonService;
>>>>>>>
@Autowired
SubmissionService submissionService;
@Autowired
EPersonServiceImpl epersonService;
private SubmissionConfigReader submissionConfigReader; |
<<<<<<<
// Log.i("qbTask", "Results (torrents): " + result.length);
=======
Log.i("qbTask", "Results (torrents): " + result.length);
>>>>>>>
// Log.i("qbTask", "Results (torrents): " + result.length);
<<<<<<<
// Log.i("qbTask", "MainActivity.names: " +
// MainActivity.names.length);
// Log.i("qbTask", "MainActivity.lines: " +
// MainActivity.names.length);
=======
Log.i("qbTask", "MainActivity.names: " + MainActivity.names.length);
Log.i("qbTask", "MainActivity.lines: " + MainActivity.names.length);
>>>>>>>
// Log.i("qbTask", "MainActivity.names: " +
// MainActivity.names.length);
// Log.i("qbTask", "MainActivity.lines: " +
// MainActivity.names.length);
<<<<<<<
firstFragment.setListAdapter(new myAdapter(MainActivity.this, names, lines));
=======
firstFragment.setListAdapter(new myAdapter(MainActivity.this,names,lines));
>>>>>>>
firstFragment.setListAdapter(new myAdapter(MainActivity.this, names, lines));
<<<<<<<
}
@Override
public int getCount() {
// TODO Auto-generated method stub}
// Log.i("qbTask", "getCount: " + ((torrentsNames != null) ? torrentsNames.length : 0));
return (torrentsNames != null) ? torrentsNames.length : 0;
=======
}
@Override
public int getCount() {
// TODO Auto-generated method stub}
Log.i("qbTask", "getCount: " + ((torrentsNames != null) ? torrentsNames.length : 0));
return (torrentsNames != null) ? torrentsNames.length : 0;
>>>>>>>
}
@Override
public int getCount() {
// TODO Auto-generated method stub}
// Log.i("qbTask", "getCount: " + ((torrentsNames != null) ?
// torrentsNames.length : 0));
return (torrentsNames != null) ? torrentsNames.length : 0; |
<<<<<<<
@Setter
private boolean sprinting;
@Setter
private boolean jumping;
@Setter
private boolean switchingDimension = false;
=======
private final AtomicInteger pendingDimSwitches = new AtomicInteger(0);
>>>>>>>
private final AtomicInteger pendingDimSwitches = new AtomicInteger(0);
@Setter
private boolean sprinting;
@Setter
private boolean jumping;
@Setter
private boolean switchingDimension = false; |
<<<<<<<
import com.nukkitx.protocol.bedrock.packet.MovePlayerPacket;
import com.nukkitx.protocol.bedrock.packet.SetEntityDataPacket;
=======
import org.geysermc.connector.console.GeyserLogger;
>>>>>>>
import com.nukkitx.protocol.bedrock.packet.MovePlayerPacket;
import com.nukkitx.protocol.bedrock.packet.SetEntityDataPacket;
<<<<<<<
session.getConnector().getLogger().info("Spawned player at " + packet.getX() + " " + packet.getY() + " " + packet.getZ());
=======
ClientTeleportConfirmPacket teleportConfirmPacket = new ClientTeleportConfirmPacket(packet.getTeleportId());
session.getDownstream().getSession().send(teleportConfirmPacket);
GeyserLogger.DEFAULT.info("Spawned player at " + packet.getX() + " " + packet.getY() + " " + packet.getZ());
>>>>>>>
ClientTeleportConfirmPacket teleportConfirmPacket = new ClientTeleportConfirmPacket(packet.getTeleportId());
session.getDownstream().getSession().send(teleportConfirmPacket);
session.getConnector().getLogger().info("Spawned player at " + packet.getX() + " " + packet.getY() + " " + packet.getZ()); |
<<<<<<<
import org.geysermc.connector.command.CommandSender;
=======
>>>>>>>
import org.geysermc.connector.command.CommandSender;
<<<<<<<
import java.io.IOException;
=======
>>>>>>>
import java.io.IOException;
<<<<<<<
if (connector.getAuthType() != AuthType.ONLINE) {
connector.getLogger().info(
"Attempting to login using " + connector.getAuthType().name().toLowerCase() + " mode... " +
(connector.getAuthType() == AuthType.OFFLINE ?
"authentication is disabled." : "authentication will be encrypted"
)
);
authenticate(authData.getName());
}
=======
if (!(connector.getConfig().getRemote().getAuthType().hashCode() == "online".hashCode())) {
connector.getLogger().info("Attempting to login using offline mode... authentication is disabled.");
authenticate(authenticationData.getName());
}
>>>>>>>
if (connector.getAuthType() != AuthType.ONLINE) {
connector.getLogger().info(
"Attempting to login using " + connector.getAuthType().name().toLowerCase() + " mode... " +
(connector.getAuthType() == AuthType.OFFLINE ?
"authentication is disabled." : "authentication will be encrypted"
)
);
authenticate(authData.getName());
}
<<<<<<<
=======
public void setRenderDistance(int renderDistance) {
renderDistance = GenericMath.ceil(++renderDistance * TrigMath.SQRT_OF_TWO); //square to circle
if (renderDistance > 32) renderDistance = 32; // <3 u ViaVersion but I don't like crashing clients x)
this.renderDistance = renderDistance;
ChunkRadiusUpdatedPacket chunkRadiusUpdatedPacket = new ChunkRadiusUpdatedPacket();
chunkRadiusUpdatedPacket.setRadius(renderDistance);
upstream.sendPacket(chunkRadiusUpdatedPacket);
}
@Override
>>>>>>>
public void setRenderDistance(int renderDistance) {
renderDistance = GenericMath.ceil(++renderDistance * TrigMath.SQRT_OF_TWO); //square to circle
if (renderDistance > 32) renderDistance = 32; // <3 u ViaVersion but I don't like crashing clients x)
this.renderDistance = renderDistance;
ChunkRadiusUpdatedPacket chunkRadiusUpdatedPacket = new ChunkRadiusUpdatedPacket();
chunkRadiusUpdatedPacket.setRadius(renderDistance);
upstream.sendPacket(chunkRadiusUpdatedPacket);
} |
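A worked example of the radius conversion above (the input is chosen for illustration): for a requested renderDistance of 8, ++renderDistance yields 9, 9 * TrigMath.SQRT_OF_TWO is about 12.73, and GenericMath.ceil rounds this up to 13; since 13 is below the cap of 32, the ChunkRadiusUpdatedPacket is sent with radius 13.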
<<<<<<<
import com.nukkitx.protocol.bedrock.v389.Bedrock_v389;
=======
import com.nukkitx.protocol.bedrock.v388.Bedrock_v388;
>>>>>>>
import com.nukkitx.protocol.bedrock.v389.Bedrock_v389;
<<<<<<<
import org.geysermc.common.PlatformType;
import org.geysermc.common.bootstrap.IGeyserBootstrap;
import org.geysermc.common.logger.IGeyserLogger;
=======
import org.fusesource.jansi.AnsiConsole;
import org.geysermc.api.AuthType;
import org.geysermc.api.Connector;
import org.geysermc.api.Geyser;
import org.geysermc.api.Player;
import org.geysermc.api.command.CommandMap;
import org.geysermc.api.logger.Logger;
import org.geysermc.api.plugin.Plugin;
>>>>>>>
import org.geysermc.common.AuthType;
import org.geysermc.common.PlatformType;
import org.geysermc.common.bootstrap.IGeyserBootstrap;
import org.geysermc.common.logger.IGeyserLogger;
<<<<<<<
public class GeyserConnector {
public static final BedrockPacketCodec BEDROCK_PACKET_CODEC = Bedrock_v389.V389_CODEC;
=======
public class GeyserConnector implements Connector {
public static final BedrockPacketCodec BEDROCK_PACKET_CODEC = Bedrock_v388.V388_CODEC;
>>>>>>>
public class GeyserConnector {
public static final BedrockPacketCodec BEDROCK_PACKET_CODEC = Bedrock_v389.V389_CODEC;
<<<<<<<
private RemoteServer remoteServer;
=======
private final Map<Object, GeyserSession> players = new HashMap<>();
private RemoteJavaServer remoteServer;
private AuthType authType;
private Logger logger;
>>>>>>>
private RemoteServer remoteServer;
private AuthType authType;
<<<<<<<
remoteServer = new RemoteServer(config.getRemote().getAddress(), config.getRemote().getPort());
=======
remoteServer = new RemoteJavaServer(config.getRemote().getAddress(), config.getRemote().getPort());
authType = AuthType.getByName(config.getRemote().getAuthType());
Geyser.setConnector(this);
pluginManager = new GeyserPluginManager(new GeyserPluginLoader(this));
pluginManager.getLoader().loadPlugins();
>>>>>>>
remoteServer = new RemoteServer(config.getRemote().getAddress(), config.getRemote().getPort());
authType = AuthType.getByName(config.getRemote().getAuthType());
<<<<<<<
metrics.addCustomChart(new Metrics.SingleLineChart("players", players::size));
metrics.addCustomChart(new Metrics.SimplePie("authMode", config.getRemote()::getAuthType));
metrics.addCustomChart(new Metrics.SimplePie("platform", platformType::getPlatformName));
=======
metrics.addCustomChart(new Metrics.SingleLineChart("players", Geyser::getPlayerCount));
metrics.addCustomChart(new Metrics.SimplePie("authMode", () -> getAuthType().name().toLowerCase()));
>>>>>>>
metrics.addCustomChart(new Metrics.SingleLineChart("players", players::size));
metrics.addCustomChart(new Metrics.SimplePie("authMode", authType.name()::toLowerCase));
metrics.addCustomChart(new Metrics.SimplePie("platform", platformType::getPlatformName)); |
<<<<<<<
import com.nukkitx.protocol.bedrock.packet.NetworkChunkPublisherUpdatePacket;
=======
>>>>>>>
<<<<<<<
GeyserConnector.getInstance().getGeneralThreadPool().execute(() -> {
Vector2i chunkPos = session.getLastChunkPosition();
Vector3f position = session.getPlayerEntity().getPosition();
Vector2i newChunkPos = Vector2i.from(position.getFloorX() >> 4, position.getFloorZ() >> 4);
if (chunkPos == null || !chunkPos.equals(newChunkPos)) {
NetworkChunkPublisherUpdatePacket chunkPublisherUpdatePacket = new NetworkChunkPublisherUpdatePacket();
chunkPublisherUpdatePacket.setPosition(position.toInt());
chunkPublisherUpdatePacket.setRadius(session.getRenderDistance() << 4);
session.getUpstream().sendPacket(chunkPublisherUpdatePacket);
session.setLastChunkPosition(newChunkPos);
}
=======
// Not sure if this is safe or not, however without this the client usually times out
Geyser.getConnector().getGeneralThreadPool().execute(() -> {
>>>>>>>
// Not sure if this is safe or not, however without this the client usually times out
GeyserConnector.getInstance().getGeneralThreadPool().execute(() -> { |
<<<<<<<
=======
int blockId = BlockTranslator.getBedrockBlockId(blockState);
>>>>>>>
<<<<<<<
updateBlockPacket.setBlockPosition(position);
updateBlockPacket.setRuntimeId(blockEntry.getBedrockRuntimeId());
=======
updateBlockPacket.setBlockPosition(pos);
updateBlockPacket.setRuntimeId(blockId);
>>>>>>>
updateBlockPacket.setBlockPosition(position);
updateBlockPacket.setRuntimeId(blockId);
<<<<<<<
waterPacket.setBlockPosition(position);
if (blockEntry.isWaterlogged()) {
BlockEntry water = TranslatorsInit.getBlockTranslator().getBlockEntry("minecraft:water[level=0]");
waterPacket.setRuntimeId(water.getBedrockRuntimeId());
=======
waterPacket.setBlockPosition(pos);
if (BlockTranslator.isWaterlogged(blockState)) {
waterPacket.setRuntimeId(BEDROCK_WATER_ID);
>>>>>>>
waterPacket.setBlockPosition(position);
if (BlockTranslator.isWaterlogged(blockState)) {
waterPacket.setRuntimeId(BEDROCK_WATER_ID); |
<<<<<<<
=======
import org.geysermc.connector.entity.Entity;
import org.geysermc.connector.inventory.Inventory;
import org.geysermc.connector.network.session.GeyserSession;
import org.geysermc.connector.network.translators.PacketTranslator;
import org.geysermc.connector.network.translators.Translator;
import org.geysermc.connector.network.translators.Translators;
import org.geysermc.connector.network.translators.item.ItemTranslator;
import org.geysermc.connector.utils.InventoryUtils;
import com.nukkitx.math.vector.Vector3f;
>>>>>>> |
<<<<<<<
=======
import com.velocitypowered.api.plugin.PluginContainer;
import com.velocitypowered.api.proxy.ProxyServer;
>>>>>>>
import com.velocitypowered.api.plugin.PluginContainer;
import com.velocitypowered.api.proxy.ProxyServer;
<<<<<<<
import org.geysermc.connector.GeyserConfiguration;
=======
import org.geysermc.common.FloodgateKeyLoader;
import org.geysermc.common.IGeyserConfiguration;
>>>>>>>
import org.geysermc.connector.FloodgateKeyLoader;
import org.geysermc.connector.GeyserConfiguration; |
<<<<<<<
import com.nukkitx.protocol.bedrock.data.ItemData;
=======
import com.nukkitx.nbt.tag.Tag;
>>>>>>>
import com.nukkitx.protocol.bedrock.data.ItemData;
import com.nukkitx.nbt.tag.Tag; |
<<<<<<<
import org.geysermc.common.window.FormWindow;
=======
import org.geysermc.api.AuthType;
import org.geysermc.api.Player;
import org.geysermc.api.RemoteServer;
import org.geysermc.api.session.AuthData;
import org.geysermc.api.window.FormWindow;
>>>>>>>
import org.geysermc.common.AuthType;
import org.geysermc.common.window.FormWindow;
<<<<<<<
import org.geysermc.connector.network.remote.RemoteServer;
import org.geysermc.connector.network.session.auth.AuthData;
=======
import org.geysermc.connector.network.session.auth.BedrockClientData;
>>>>>>>
import org.geysermc.connector.network.remote.RemoteServer;
import org.geysermc.connector.network.session.auth.AuthData;
import org.geysermc.connector.network.session.auth.BedrockClientData;
<<<<<<<
public class GeyserSession implements CommandSender {
=======
public class GeyserSession implements Player {
>>>>>>>
public class GeyserSession implements CommandSender {
<<<<<<<
private AuthData authData;
=======
@Setter private AuthData authenticationData;
@Setter private BedrockClientData clientData;
>>>>>>>
@Setter private AuthData authData;
@Setter private BedrockClientData clientData;
<<<<<<<
if (!(connector.getConfig().getRemote().getAuthType().hashCode() == "online".hashCode())) {
connector.getLogger().info("Attempting to login using offline mode... authentication is disabled.");
authenticate(authData.getName());
=======
if (connector.getAuthType() != AuthType.ONLINE) {
connector.getLogger().info(
"Attempting to login using " + connector.getAuthType().name().toLowerCase() + " mode... " +
(connector.getAuthType() == AuthType.OFFLINE ?
"authentication is disabled." : "authentication will be encrypted"
)
);
authenticate(authenticationData.getName());
>>>>>>>
if (connector.getAuthType() != AuthType.ONLINE) {
connector.getLogger().info(
"Attempting to login using " + connector.getAuthType().name().toLowerCase() + " mode... " +
(connector.getAuthType() == AuthType.OFFLINE ?
"authentication is disabled." : "authentication will be encrypted"
)
);
authenticate(authData.getName());
<<<<<<<
public void setAuthenticationData(AuthData authData) {
this.authData = authData;
}
=======
>>>>>>>
public void setAuthenticationData(AuthData authData) {
this.authData = authData;
} |
<<<<<<<
private static final Map<String, NbtMap> FLOWER_POT_BLOCKS = new HashMap<>();
private static final Int2BooleanMap LECTERN_BOOK_STATES = new Int2BooleanOpenHashMap();
=======
>>>>>>>
private static final Int2BooleanMap LECTERN_BOOK_STATES = new Int2BooleanOpenHashMap(); |
<<<<<<<
import com.nukkitx.protocol.bedrock.packet.InventoryContentPacket;
=======
import com.nukkitx.protocol.bedrock.packet.AvailableEntityIdentifiersPacket;
import com.nukkitx.protocol.bedrock.packet.BiomeDefinitionListPacket;
import com.nukkitx.protocol.bedrock.packet.LevelChunkPacket;
import com.nukkitx.protocol.bedrock.packet.NetworkChunkPublisherUpdatePacket;
>>>>>>>
import com.nukkitx.protocol.bedrock.packet.AvailableEntityIdentifiersPacket;
import com.nukkitx.protocol.bedrock.packet.BiomeDefinitionListPacket;
import com.nukkitx.protocol.bedrock.packet.LevelChunkPacket;
import com.nukkitx.protocol.bedrock.packet.NetworkChunkPublisherUpdatePacket;
import com.nukkitx.protocol.bedrock.packet.InventoryContentPacket;
<<<<<<<
PlayStatusPacket playStatusPacket = new PlayStatusPacket();
playStatusPacket.setStatus(PlayStatusPacket.Status.PLAYER_SPAWN);
upstream.sendPacket(playStatusPacket);
InventoryContentPacket creativePacket = new InventoryContentPacket();
creativePacket.setContainerId(ContainerId.CREATIVE);
creativePacket.setContents(Toolbox.CREATIVE_ITEMS);
upstream.sendPacket(creativePacket);
=======
>>>>>>> |
<<<<<<<
command("git-upload-pack").to(Commands.key(git, "upload-pack"));
command(git, "upload-pack").to(Upload.class);
command("git-upload-archive").to(Commands.key(git, "upload-archive"));
command(git, "upload-archive").to(UploadArchive.class);
=======
if (sshEnabled()) {
command("git-upload-pack").to(Commands.key(git, "upload-pack"));
command(git, "upload-pack").to(Upload.class);
}
>>>>>>>
if (sshEnabled()) {
command("git-upload-pack").to(Commands.key(git, "upload-pack"));
command(git, "upload-pack").to(Upload.class);
command("git-upload-archive").to(Commands.key(git, "upload-archive"));
command(git, "upload-archive").to(UploadArchive.class);
}
<<<<<<<
if (slaveMode) {
command("git-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command("gerrit-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(git, "receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(gerrit, "test-submit").to(NotSupportedInSlaveModeFailureCommand.class);
} else {
command("git-receive-pack").to(Commands.key(git, "receive-pack"));
command("gerrit-receive-pack").to(Commands.key(git, "receive-pack"));
command(git, "receive-pack").to(Commands.key(gerrit, "receive-pack"));
=======
if (!slaveMode) {
if (sshEnabled()) {
command("git-receive-pack").to(Commands.key(git, "receive-pack"));
command("gerrit-receive-pack").to(Commands.key(git, "receive-pack"));
command(git, "receive-pack").to(Commands.key(gerrit, "receive-pack"));
}
>>>>>>>
if (slaveMode) {
command("git-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command("gerrit-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(git, "receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(gerrit, "test-submit").to(NotSupportedInSlaveModeFailureCommand.class);
} else {
if (sshEnabled()) {
command("git-receive-pack").to(Commands.key(git, "receive-pack"));
command("gerrit-receive-pack").to(Commands.key(git, "receive-pack"));
command(git, "receive-pack").to(Commands.key(gerrit, "receive-pack"));
} |
<<<<<<<
import org.geysermc.connector.GeyserConfiguration;
=======
import lombok.Setter;
import org.geysermc.common.IGeyserConfiguration;
>>>>>>>
import lombok.Setter;
import org.geysermc.connector.GeyserConfiguration; |
<<<<<<<
import com.github.steveice10.mc.protocol.data.game.statistic.Statistic;
=======
import com.github.steveice10.mc.protocol.data.game.recipe.Recipe;
>>>>>>>
import com.github.steveice10.mc.protocol.data.game.statistic.Statistic;
import com.github.steveice10.mc.protocol.data.game.recipe.Recipe;
<<<<<<<
import it.unimi.dsi.fastutil.objects.ObjectIterator;
=======
import lombok.AccessLevel;
>>>>>>>
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import lombok.AccessLevel;
<<<<<<<
import org.geysermc.connector.entity.player.SkullPlayerEntity;
import org.geysermc.connector.entity.player.SessionPlayerEntity;
=======
import org.geysermc.connector.entity.PlayerEntity;
import org.geysermc.connector.inventory.Inventory;
>>>>>>>
import org.geysermc.connector.entity.player.SkullPlayerEntity;
import org.geysermc.connector.entity.player.SessionPlayerEntity;
import org.geysermc.connector.inventory.Inventory;
<<<<<<<
import org.geysermc.connector.network.translators.collision.CollisionManager;
import org.geysermc.connector.network.translators.inventory.EnchantmentInventoryTranslator;
=======
>>>>>>>
import org.geysermc.connector.network.translators.collision.CollisionManager;
<<<<<<<
private final SessionPlayerEntity playerEntity;
private PlayerInventory inventory;
=======
private PlayerEntity playerEntity;
private final PlayerInventory playerInventory;
@Setter
private Inventory openInventory;
private final AtomicInteger itemNetId = new AtomicInteger(1);
@Getter(AccessLevel.NONE)
private final Object inventoryLock = new Object();
@Getter(AccessLevel.NONE)
private CompletableFuture<Void> inventoryFuture;
>>>>>>>
private final SessionPlayerEntity playerEntity;
<<<<<<<
private EntityEffectCache effectCache;
private InventoryCache inventoryCache;
=======
>>>>>>>
private EntityEffectCache effectCache;
<<<<<<<
this.inventoryCache.getInventories().put(0, inventory);
connector.getPlayers().forEach(player -> this.emotes.addAll(player.getEmotes()));
=======
>>>>>>>
connector.getPlayers().forEach(player -> this.emotes.addAll(player.getEmotes()));
<<<<<<<
public void addTeleport(TeleportCache teleportCache) {
teleportMap.put(teleportCache.getTeleportConfirmId(), teleportCache);
ObjectIterator<Int2ObjectMap.Entry<TeleportCache>> it = teleportMap.int2ObjectEntrySet().iterator();
// Remove any teleports with a higher number - maybe this is a world change that reset the ID to 0?
while (it.hasNext()) {
Int2ObjectMap.Entry<TeleportCache> entry = it.next();
int nextID = entry.getValue().getTeleportConfirmId();
if (nextID > teleportCache.getTeleportConfirmId()) {
it.remove();
}
}
}
=======
/**
* Adds a new inventory task.
* Inventory tasks are executed one at a time, in order.
*
* @param task the task to run
*/
public void addInventoryTask(Runnable task) {
synchronized (inventoryLock) {
System.out.println("new task " + task.toString());
inventoryFuture = inventoryFuture.thenRun(task).exceptionally(throwable -> {
GeyserConnector.getInstance().getLogger().error("Error processing inventory task", throwable.getCause());
return null;
});
}
}
/**
* Adds a new inventory task with a delay.
* The delay is achieved by scheduling with the Geyser general thread pool.
* Inventory tasks are executed one at a time, in order.
*
* @param task the delayed task to run
* @param delayMillis delay in milliseconds
*/
public void addInventoryTask(Runnable task, long delayMillis) {
synchronized (inventoryLock) {
System.out.println("new delayed task " + task.toString());
Executor delayedExecutor = command -> GeyserConnector.getInstance().getGeneralThreadPool().schedule(command, delayMillis, TimeUnit.MILLISECONDS);
inventoryFuture = inventoryFuture.thenRunAsync(task, delayedExecutor).exceptionally(throwable -> {
GeyserConnector.getInstance().getLogger().error("Error processing inventory task", throwable.getCause());
return null;
});
}
}
>>>>>>>
/**
* Adds a new inventory task.
* Inventory tasks are executed one at a time, in order.
*
* @param task the task to run
*/
public void addInventoryTask(Runnable task) {
synchronized (inventoryLock) {
System.out.println("new task " + task.toString());
inventoryFuture = inventoryFuture.thenRun(task).exceptionally(throwable -> {
GeyserConnector.getInstance().getLogger().error("Error processing inventory task", throwable.getCause());
return null;
});
}
}
/**
* Adds a new inventory task with a delay.
* The delay is achieved by scheduling with the Geyser general thread pool.
* Inventory tasks are executed one at a time, in order.
*
* @param task the delayed task to run
* @param delayMillis delay in milliseconds
*/
public void addInventoryTask(Runnable task, long delayMillis) {
synchronized (inventoryLock) {
System.out.println("new delayed task " + task.toString());
Executor delayedExecutor = command -> GeyserConnector.getInstance().getGeneralThreadPool().schedule(command, delayMillis, TimeUnit.MILLISECONDS);
inventoryFuture = inventoryFuture.thenRunAsync(task, delayedExecutor).exceptionally(throwable -> {
GeyserConnector.getInstance().getLogger().error("Error processing inventory task", throwable.getCause());
return null;
});
}
}
public void addTeleport(TeleportCache teleportCache) {
teleportMap.put(teleportCache.getTeleportConfirmId(), teleportCache);
ObjectIterator<Int2ObjectMap.Entry<TeleportCache>> it = teleportMap.int2ObjectEntrySet().iterator();
// Remove any teleports with a higher number - maybe this is a world change that reset the ID to 0?
while (it.hasNext()) {
Int2ObjectMap.Entry<TeleportCache> entry = it.next();
int nextID = entry.getValue().getTeleportConfirmId();
if (nextID > teleportCache.getTeleportConfirmId()) {
it.remove();
}
}
} |
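The resolution above keeps the inventory-task queue its Javadoc describes: tasks are chained onto a CompletableFuture under a lock so they run one at a time in submission order, and delayed tasks are scheduled through a thread pool. Here is a hedged, self-contained sketch of that chaining pattern, with a plain ScheduledExecutorService standing in for Geyser's general thread pool and the class name invented for illustration.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Hypothetical queue that runs submitted tasks one at a time, in submission order.
public final class InventoryTaskQueue {
    private final Object lock = new Object();
    private final ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
    private CompletableFuture<Void> chain = CompletableFuture.completedFuture(null);

    /** Appends a task to the chain; tasks never overlap and run in order. */
    public void addTask(Runnable task) {
        synchronized (lock) {
            chain = chain.thenRun(task).exceptionally(throwable -> {
                throwable.printStackTrace(); // stand-in for the connector's error logger
                return null;                 // recover so later tasks still run
            });
        }
    }

    /** Appends a task that only starts after the given delay has elapsed. */
    public void addTask(Runnable task, long delayMillis) {
        synchronized (lock) {
            Executor delayed = command -> pool.schedule(command, delayMillis, TimeUnit.MILLISECONDS);
            chain = chain.thenRunAsync(task, delayed).exceptionally(throwable -> {
                throwable.printStackTrace();
                return null;
            });
        }
    }

    public static void main(String[] args) throws InterruptedException {
        InventoryTaskQueue queue = new InventoryTaskQueue();
        queue.addTask(() -> System.out.println("first"));
        queue.addTask(() -> System.out.println("second"), 50); // delayed, still runs after "first"
        Thread.sleep(200);
        queue.pool.shutdown();
    }
}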
<<<<<<<
import com.nukkitx.protocol.bedrock.packet.ContainerOpenPacket;
import com.nukkitx.protocol.bedrock.packet.InventorySlotPacket;
import com.nukkitx.protocol.bedrock.data.inventory.InventoryActionData;
import com.nukkitx.protocol.bedrock.data.inventory.InventorySource;
import com.nukkitx.protocol.bedrock.packet.InventoryTransactionPacket;
import com.nukkitx.protocol.bedrock.packet.LevelEventPacket;
=======
import com.nukkitx.protocol.bedrock.packet.*;
>>>>>>>
import com.nukkitx.protocol.bedrock.data.inventory.InventoryActionData;
import com.nukkitx.protocol.bedrock.data.inventory.InventorySource;
import com.nukkitx.protocol.bedrock.packet.*; |
<<<<<<<
public static class ElasticNodeInfo {
public final int port;
public ElasticNodeInfo(int port) {
this.port = port;
}
}
public static void configure(Config config, int port, String prefix, ElasticVersion version) {
config.setString("index", null, "type", "elasticsearch");
config.setString("elasticsearch", null, "server", "http://localhost:" + port);
=======
public static void configure(
Config config, ElasticContainer container, String prefix, ElasticVersion version) {
String hostname = container.getHttpHost().getHostName();
int port = container.getHttpHost().getPort();
config.setEnum("index", null, "type", IndexType.ELASTICSEARCH);
config.setString("elasticsearch", null, "server", "http://" + hostname + ":" + port);
>>>>>>>
public static void configure(
Config config, ElasticContainer container, String prefix, ElasticVersion version) {
String hostname = container.getHttpHost().getHostName();
int port = container.getHttpHost().getPort();
config.setString("index", null, "type", "elasticsearch");
config.setString("elasticsearch", null, "server", "http://" + hostname + ":" + port); |
<<<<<<<
=======
import com.github.steveice10.mc.protocol.data.game.entity.metadata.ItemStack;
import com.github.steveice10.mc.protocol.packet.ingame.server.entity.player.ServerPlayerActionAckPacket;
import com.github.steveice10.opennbt.tag.builtin.*;
import com.nukkitx.math.vector.Vector3f;
import com.nukkitx.protocol.bedrock.data.LevelEventType;
import com.nukkitx.protocol.bedrock.packet.LevelEventPacket;
import org.geysermc.connector.inventory.PlayerInventory;
>>>>>>>
import com.github.steveice10.mc.protocol.data.game.entity.metadata.ItemStack;
import com.github.steveice10.mc.protocol.packet.ingame.server.entity.player.ServerPlayerActionAckPacket;
import com.github.steveice10.opennbt.tag.builtin.*;
import com.nukkitx.math.vector.Vector3f;
import com.nukkitx.protocol.bedrock.data.LevelEventType;
import com.nukkitx.protocol.bedrock.packet.LevelEventPacket;
import org.geysermc.connector.inventory.PlayerInventory;
<<<<<<<
import org.geysermc.connector.network.translators.Translator;
=======
import org.geysermc.connector.network.translators.TranslatorsInit;
import org.geysermc.connector.network.translators.block.BlockTranslator;
import org.geysermc.connector.network.translators.item.ItemEntry;
import org.geysermc.connector.utils.BlockUtils;
>>>>>>>
import org.geysermc.connector.network.translators.Translators;
import org.geysermc.connector.network.translators.block.BlockTranslator;
import org.geysermc.connector.network.translators.item.ItemEntry;
import org.geysermc.connector.utils.BlockUtils;
import org.geysermc.connector.network.translators.Translator; |
<<<<<<<
import com.google.gerrit.extensions.restapi.TopLevelResource;
import com.google.gerrit.extensions.restapi.Url;
=======
import com.google.gerrit.extensions.restapi.Url;
>>>>>>>
import com.google.gerrit.extensions.restapi.TopLevelResource;
import com.google.gerrit.extensions.restapi.Url;
<<<<<<<
user1 = user("user1", group1);
user2 = user("user2", group2);
user3 = user("user3", group1, group2);
=======
user1 = accounts.create("user1", "[email protected]", "First1 Last1",
"users1");
user2 = accounts.create("user2", "[email protected]", "First2 Last2",
"users2");
user3 = accounts.create("user3", "[email protected]", "First3 Last3",
"users1", "users2");
>>>>>>>
user1 = user("user1", "First1 Last1", group1);
user2 = user("user2", "First2 Last2", group2);
user3 = user("user3", "First3 Last3", group1, group2);
<<<<<<<
=======
private List<SuggestedReviewerInfo> suggestReviewers(RestSession session,
String changeId, String query, int n) throws IOException {
return newGson().fromJson(
session.get("/changes/"
+ changeId
+ "/suggest_reviewers?q="
+ Url.encode(query)
+ "&n="
+ n)
.getReader(),
new TypeToken<List<SuggestedReviewerInfo>>() {}
.getType());
}
>>>>>>> |
<<<<<<<
/** @return true if this user can delete their own changes. */
boolean canDeleteOwnChanges(boolean isChangeOwner) {
return canPerform(Permission.DELETE_OWN_CHANGES, isChangeOwner);
=======
/** @return true if this user can delete changes. */
public boolean canDeleteChanges(boolean isChangeOwner) {
return canPerform(Permission.DELETE_CHANGES)
|| (isChangeOwner && canPerform(Permission.DELETE_OWN_CHANGES, isChangeOwner));
>>>>>>>
/** @return true if this user can delete changes. */
boolean canDeleteChanges(boolean isChangeOwner) {
return canPerform(Permission.DELETE_CHANGES)
|| (isChangeOwner && canPerform(Permission.DELETE_OWN_CHANGES, isChangeOwner)); |
<<<<<<<
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.gerrit.common.FileUtil;
=======
import com.google.common.base.Preconditions;
>>>>>>>
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Preconditions;
import com.google.gerrit.common.FileUtil; |
<<<<<<<
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
=======
import java.util.concurrent.TimeUnit;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;
>>>>>>>
import static ee.ria.xroad.signer.protocol.ComponentNames.MODULE_MANAGER;
import static ee.ria.xroad.signer.protocol.ComponentNames.OCSP_CLIENT;
import static ee.ria.xroad.signer.protocol.ComponentNames.OCSP_CLIENT_JOB;
import static ee.ria.xroad.signer.protocol.ComponentNames.OCSP_CLIENT_RELOAD;
import static ee.ria.xroad.signer.protocol.ComponentNames.OCSP_RESPONSE_MANAGER;
import static ee.ria.xroad.signer.protocol.ComponentNames.REQUEST_PROCESSOR;
import java.util.concurrent.TimeUnit;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
<<<<<<<
private static final FiniteDuration MODULE_MANAGER_INTERVAL =
Duration.create(60, TimeUnit.SECONDS);
ModuleManagerJob() {
super(MODULE_MANAGER, new Update(), MODULE_MANAGER_INTERVAL);
=======
ModuleManagerJob() {
super(MODULE_MANAGER, new Update(), MODULE_MANAGER_UPDATE_INTERVAL);
>>>>>>>
ModuleManagerJob() {
super(MODULE_MANAGER, new Update(), MODULE_MANAGER_UPDATE_INTERVAL);
<<<<<<<
=======
/**
* Periodically executes the OcspClient
*/
private static class OcspClientJob extends VariableIntervalPeriodicJob {
private static final FiniteDuration INITIAL_DELAY =
FiniteDuration.create(100, TimeUnit.MILLISECONDS);
OcspClientJob() {
super(OCSP_CLIENT, OcspClientWorker.EXECUTE);
}
@Override
protected FiniteDuration getInitialDelay() {
return INITIAL_DELAY;
}
@Override
protected FiniteDuration getNextDelay() {
return FiniteDuration.create(
OcspClientWorker.getNextOcspFreshnessSeconds(),
TimeUnit.SECONDS);
}
}
>>>>>>> |
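The incoming side of this entry adds an OcspClientJob whose next delay is recomputed after every run, a variable-interval periodic job. The following is a hedged sketch of that scheduling shape without Akka, using a plain ScheduledExecutorService; the class and parameter names are illustrative only and do not reflect the signer's actual API.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.LongSupplier;

// Variable-interval periodic job: fires once after an initial delay, then reschedules
// itself using a freshly computed next delay after every run.
public final class VariableIntervalJob {
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private final Runnable task;
    private final LongSupplier nextDelayMillis; // recomputed before every rescheduling

    public VariableIntervalJob(Runnable task, LongSupplier nextDelayMillis) {
        this.task = task;
        this.nextDelayMillis = nextDelayMillis;
    }

    public void start(long initialDelayMillis) {
        scheduler.schedule(this::runAndReschedule, initialDelayMillis, TimeUnit.MILLISECONDS);
    }

    private void runAndReschedule() {
        try {
            task.run();
        } finally {
            // schedule the next execution with a delay that may differ every time
            scheduler.schedule(this::runAndReschedule, nextDelayMillis.getAsLong(), TimeUnit.MILLISECONDS);
        }
    }

    public void stop() {
        scheduler.shutdownNow();
    }

    public static void main(String[] args) throws InterruptedException {
        VariableIntervalJob job = new VariableIntervalJob(
                () -> System.out.println("tick " + System.currentTimeMillis()),
                () -> 500);     // next delay supplier, queried on every run
        job.start(100);         // initial delay, analogous to INITIAL_DELAY above
        Thread.sleep(2000);
        job.stop();
    }
}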
<<<<<<<
import ee.ria.xroad.asyncdb.AsyncDB;
import ee.ria.xroad.asyncdb.WritingCtx;
import ee.ria.xroad.asyncdb.messagequeue.MessageQueue;
=======
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.bind.Marshaller;
import javax.xml.soap.SOAPEnvelope;
import javax.xml.soap.SOAPMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.HttpClient;
import org.bouncycastle.cert.ocsp.OCSPResp;
import org.bouncycastle.util.Arrays;
import org.w3c.dom.Node;
>>>>>>>
import static ee.ria.xroad.common.ErrorCodes.X_INCONSISTENT_RESPONSE;
import static ee.ria.xroad.common.ErrorCodes.X_INTERNAL_ERROR;
import static ee.ria.xroad.common.ErrorCodes.X_INVALID_SECURITY_SERVER;
import static ee.ria.xroad.common.ErrorCodes.X_MALFORMED_SOAP;
import static ee.ria.xroad.common.ErrorCodes.X_MISSING_SIGNATURE;
import static ee.ria.xroad.common.ErrorCodes.X_MISSING_SOAP;
import static ee.ria.xroad.common.ErrorCodes.X_SERVICE_FAILED_X;
import static ee.ria.xroad.common.ErrorCodes.X_UNKNOWN_MEMBER;
import static ee.ria.xroad.common.ErrorCodes.translateException;
import static ee.ria.xroad.common.SystemProperties.getServerProxyPort;
import static ee.ria.xroad.common.SystemProperties.isSslEnabled;
import static ee.ria.xroad.common.util.AbstractHttpSender.CHUNKED_LENGTH;
import static ee.ria.xroad.common.util.CryptoUtils.calculateDigest;
import static ee.ria.xroad.common.util.CryptoUtils.decodeBase64;
import static ee.ria.xroad.common.util.CryptoUtils.encodeBase64;
import static ee.ria.xroad.common.util.CryptoUtils.getAlgorithmId;
import static ee.ria.xroad.common.util.MimeUtils.HEADER_HASH_ALGO_ID;
import static ee.ria.xroad.common.util.MimeUtils.HEADER_ORIGINAL_CONTENT_TYPE;
import static ee.ria.xroad.common.util.MimeUtils.HEADER_PROXY_VERSION;
import static ee.ria.xroad.proxy.clientproxy.FastestConnectionSelectingSSLSocketFactory.ID_TARGETS;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.bind.Marshaller;
import javax.xml.soap.SOAPEnvelope;
import javax.xml.soap.SOAPMessage;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.HttpClient;
import org.bouncycastle.cert.ocsp.OCSPResp;
import org.bouncycastle.util.Arrays;
import org.w3c.dom.Node;
<<<<<<<
private void logRequestMessage() throws Exception {
if (request != null) {
log.trace("logRequestMessage()");
MessageLog.log(requestSoap, request.getSignature(), true);
}
}
=======
>>>>>>>
<<<<<<<
if (!SystemProperties.isSslEnabled()) {
=======
if (!isSslEnabled()) {
>>>>>>>
if (!isSslEnabled()) {
<<<<<<<
final int port = SystemProperties.getServerProxyPort();
return new URI("https", null, "localhost", port, "/", null, null);
=======
return new URI("https", null, "localhost", getServerProxyPort(), "/",
null, null);
>>>>>>>
return new URI("https", null, "localhost", getServerProxyPort(), "/",
null, null);
<<<<<<<
if (serverId != null) {
final String securityServerAddress = GlobalConf.getSecurityServerAddress(serverId);
if (securityServerAddress == null) {
throw new CodedException(X_INVALID_SECURITY_SERVER,
"Could not find security server \"%s\"",
serverId);
}
if (!hostNames.contains(securityServerAddress)) {
throw new CodedException(X_INVALID_SECURITY_SERVER,
"Invalid security server \"%s\"",
serviceProvider);
}
hostNames = Collections.singleton(securityServerAddress);
}
String protocol = SystemProperties.isSslEnabled() ? "https" : "http";
int port = SystemProperties.getServerProxyPort();
=======
String protocol = isSslEnabled() ? "https" : "http";
int port = getServerProxyPort();
>>>>>>>
if (serverId != null) {
final String securityServerAddress = GlobalConf.getSecurityServerAddress(serverId);
if (securityServerAddress == null) {
throw new CodedException(X_INVALID_SECURITY_SERVER,
"Could not find security server \"%s\"",
serverId);
}
if (!hostNames.contains(securityServerAddress)) {
throw new CodedException(X_INVALID_SECURITY_SERVER,
"Invalid security server \"%s\"",
serviceProvider);
}
hostNames = Collections.singleton(securityServerAddress);
}
String protocol = isSslEnabled() ? "https" : "http";
int port = getServerProxyPort();
<<<<<<<
if (handler == null) {
chooseHandler();
}
handler.soap(message);
}
@Override
public void attachment(String contentType, InputStream content,
Map<String, String> additionalHeaders) throws Exception {
log.trace("attachment({})", contentType);
if (handler != null) {
handler.attachment(contentType, content, additionalHeaders);
} else {
throw new CodedException(X_INTERNAL_ERROR,
"No soap message handler present");
}
}
@Override
public void fault(SoapFault fault) throws Exception {
onError(fault.toCodedException());
}
private void chooseHandler() {
isAsync = requestSoap.isAsync() && (servletRequest.getHeader(
SoapUtils.X_IGNORE_ASYNC) == null);
if (isAsync) {
log.trace("Creating handler for asynchronous messages");
handler = new AsyncSoapMessageHandler();
} else {
log.trace("Creating handler for normal messages");
handler = new DefaultSoapMessageHandler();
}
}
@Override
public void onCompleted() {
log.trace("onCompleted()");
if (requestSoap == null) {
setError(new ClientException(X_MISSING_SOAP,
"Request does not contain SOAP message"));
return;
}
if (handler != null) {
handler.onCompleted();
}
try {
logRequestMessage();
} catch (Exception e) {
setError(e);
}
}
@Override
public void onError(Exception e) throws Exception {
log.error("onError(): ", e);
if (handler != null) {
handler.onError(e);
} else {
throw e;
}
}
@Override
public void close() {
handler.close();
}
}
private class DefaultSoapMessageHandler
implements SoapMessageDecoder.Callback {
@Override
public void soap(SoapMessage message) throws Exception {
=======
>>>>>>>
<<<<<<<
=======
}
private void logRequestMessage() throws Exception {
log.trace("logRequestMessage()");
MessageLog.log(requestSoap, request.getSignature(), true);
>>>>>>>
}
private void logRequestMessage() throws Exception {
log.trace("logRequestMessage()");
MessageLog.log(requestSoap, request.getSignature(), true);
<<<<<<<
=======
log.error("onError(): ", e);
>>>>>>>
log.error("onError(): ", e);
<<<<<<<
@Override
public void close() {
if (request != null) {
try {
request.close();
} catch (Exception e) {
setError(e);
}
}
}
}
private class AsyncSoapMessageHandler
implements SoapMessageDecoder.Callback {
WritingCtx writingCtx = null;
SoapMessageConsumer consumer = null;
@Override
public void soap(SoapMessage message) throws Exception {
if (writingCtx == null) {
MessageQueue queue =
AsyncDB.getMessageQueue(requestServiceId.getClientId());
writingCtx = queue.startWriting();
consumer = writingCtx.getConsumer();
}
consumer.soap(message);
}
=======
>>>>>>> |
<<<<<<<
=======
/**
* @param queryId the query ID
* @return MD5 hex digest of the given query ID
* @throws Exception if any errors occur
*/
public static String hashQueryId(String queryId) throws Exception {
return hexDigest(MD5_ID, queryId);
}
static String decodeBase64(String base64Encoded) {
return (base64Encoded != null && !base64Encoded.isEmpty())
? new String(CryptoUtils.decodeBase64(base64Encoded)) : null;
}
>>>>>>>
/**
* @param queryId the query ID
* @return MD5 hex digest of the given query ID
* @throws Exception if any errors occur
*/
public static String hashQueryId(String queryId) throws Exception {
return hexDigest(MD5_ID, queryId);
}
static String decodeBase64(String base64Encoded) {
return (base64Encoded != null && !base64Encoded.isEmpty())
? new String(CryptoUtils.decodeBase64(base64Encoded)) : null;
} |
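The resolved hashQueryId helper above returns the MD5 hex digest of a query ID string. Below is a plain-JDK sketch of that behavior, under the assumption that hexDigest(MD5_ID, s) is the lowercase hex MD5 of s; the CryptoUtils/hexDigest helpers from the entry are X-Road utilities and are not reproduced here.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Hypothetical standalone equivalent of hexDigest(MD5_ID, queryId).
public final class QueryIdHasher {
    public static String hashQueryId(String queryId) throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("MD5")
                .digest(queryId.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            hex.append(String.format("%02x", b & 0xff)); // two lowercase hex chars per byte
        }
        return hex.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(hashQueryId("example-query-id"));
    }
}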
<<<<<<<
TestConfDir(String name, boolean writeExpireDate) {
this(name);
this.writeExpireDate = writeExpireDate;
}
=======
>>>>>>> |
<<<<<<<
} catch (Exception ex) {
log.error("Request processing error", ex);
=======
} catch (Throwable ex) {
CodedException cex = translateWithPrefix(SERVER_SERVERPROXY_X, ex);
log.error("Request processing error (" + cex.getFaultDetail() + ")",
ex);
>>>>>>>
} catch (Exception ex) {
CodedException cex = translateWithPrefix(SERVER_SERVERPROXY_X, ex);
log.error("Request processing error (" + cex.getFaultDetail() + ")",
ex); |
<<<<<<<
import ee.ria.xroad.common.ocsp.OcspVerifierOptions;
=======
import ee.ria.xroad.common.util.CertUtils;
>>>>>>>
import ee.ria.xroad.common.ocsp.OcspVerifierOptions;
import ee.ria.xroad.common.util.CertUtils;
<<<<<<<
if (securityServer.equals(GlobalConf.getServerId(cert))) {
verifyOcspResponse(securityServer.getXRoadInstance(), cert, certInfo.getOcspBytes(),
new OcspVerifierOptions(GlobalConfExtensions.getInstance().shouldVerifyOcspNextUpdate()));
=======
if (securityServer.equals(serverIdFromConf)) {
verifyOcspResponse(securityServer.getXRoadInstance(), cert,
certInfo.getOcspBytes());
>>>>>>>
if (securityServer.equals(serverIdFromConf)) {
verifyOcspResponse(securityServer.getXRoadInstance(), cert, certInfo.getOcspBytes(),
new OcspVerifierOptions(GlobalConfExtensions.getInstance().shouldVerifyOcspNextUpdate()));
<<<<<<<
log.warn("Ignoring authentication certificate '{}' because: ",
cert.getSubjectX500Principal().getName(), e);
=======
log.warn("Ignoring authentication certificate '{}' because: {}",
CertUtils.identify(cert),
e.getMessage());
return false;
>>>>>>>
log.warn("Ignoring authentication certificate '{}' because: ",
cert.getSubjectX500Principal().getName(), e);
return false; |
<<<<<<<
} catch (Exception e) {
sendResponse(translateException(e).withPrefix(SIGNER_X));
=======
>>>>>>> |
<<<<<<<
import org.opentripplanner.routing.edgetype.RentABikeOnEdge;
import org.opentripplanner.routing.edgetype.TinyTurnEdge;
import org.opentripplanner.routing.edgetype.TransitBoardAlight;
=======
>>>>>>>
<<<<<<<
// debug: push vehicle late status out to UI
// if (backEdge instanceof PatternHop) {
// TripTimes tt = state.getTripTimes();
// int hop = ((PatternHop)backEdge).stopIndex;
// LOG.info("{} {}", tt.getTrip().toString(), hop);
// if ( ! tt.isScheduled()) {
// int delay = tt.getDepartureDelay(hop);
// String d = "on time";
// if (Math.abs(delay) > 10) {
// d = String.format("%2.1f min %s", delay / 60.0,
// (delay < 0) ? "early" : "late");
// }
// d = "Using real-time delay information: ".concat(d);
// leg.addAlert(Alert.createSimpleAlerts(d));
// LOG.info(d);
// }
// else {
// leg.addAlert(Alert.createSimpleAlerts("Using published timetables."));
// LOG.info("sched");
// }
// }
TraverseMode mode = backEdgeNarrative.getMode();
=======
TraverseMode mode = state.getBackMode();
>>>>>>>
// debug: push vehicle late status out to UI
// if (backEdge instanceof PatternHop) {
// TripTimes tt = state.getTripTimes();
// int hop = ((PatternHop)backEdge).stopIndex;
// LOG.info("{} {}", tt.getTrip().toString(), hop);
// if ( ! tt.isScheduled()) {
// int delay = tt.getDepartureDelay(hop);
// String d = "on time";
// if (Math.abs(delay) > 10) {
// d = String.format("%2.1f min %s", delay / 60.0,
// (delay < 0) ? "early" : "late");
// }
// d = "Using real-time delay information: ".concat(d);
// leg.addAlert(Alert.createSimpleAlerts(d));
// LOG.info(d);
// }
// else {
// leg.addAlert(Alert.createSimpleAlerts("Using published timetables."));
// LOG.info("sched");
// }
// }
TraverseMode mode = state.getBackMode(); |
<<<<<<<
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
=======
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LineString;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import jersey.repackaged.com.google.common.collect.Maps;
>>>>>>>
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LineString; |
<<<<<<<
private TObjectIntMap[] matrix;
/** The best time to reach each stop in any round by transit only, not by transfer from another stop. */
=======
private TObjectIntMap[] matrix;
>>>>>>>
private TObjectIntMap[] matrix;
/** The best time to reach each stop in any round by transit only, not by transfer from another stop. */
<<<<<<<
/** The maximum acceptable clock time in seconds since midnight. All arrivals after this time will be ignored. */
=======
>>>>>>>
/** The maximum acceptable clock time in seconds since midnight. All arrivals after this time will be ignored. */
<<<<<<<
boolean stored = false;
// if (time > maxTime)
// return false;
=======
if (time > maxTime)
return false;
// This does not store internal algorithm state as it used to, but rather only the output.
// The reasoning is that, in dynamic programming/range RAPTOR mode, bestStops is carried over between runs of
// the algorithm. But you still want to propagate a non-optimal time with fewer transfers, because the
// optimal time at this stop might have already used up all of the transfers.
if (!transfer && time < bestStops.get(t))
bestStops.put(t, time);
>>>>>>>
boolean stored = false;
// if (time > maxTime)
// return false;
// This does not store internal algorithm state as it used to, but rather only the output.
// The reasoning is that, in dynamic programming/range RAPTOR mode, bestStops is carried over between runs of
// the algorithm. But you still want to propagate a non-optimal time with fewer transfers, because the
// optimal time at this stop might have already used up all of the transfers.
if (!transfer && time < bestStops.get(t))
bestStops.put(t, time); |
<<<<<<<
public class TransitStopStreetVertex extends OsmVertex {
=======
/**
* A vertex for an OSM node that represents a transit stop and has a ref=(stop_code) tag.
* OTP will treat this as an authoritative statement on where the transit stop is located within the street network,
* and the GTFS stop vertex will be linked to exactly this location.
*/
public class TransitStopStreetVertex extends IntersectionVertex {
>>>>>>>
/**
* A vertex for an OSM node that represents a transit stop and has a ref=(stop_code) tag.
* OTP will treat this as an authoritative statement on where the transit stop is located within the street network,
* and the GTFS stop vertex will be linked to exactly this location.
*/
public class TransitStopStreetVertex extends OsmVertex { |
<<<<<<<
=======
import org.opentripplanner.graph_builder.module.StreetLinkerModule;
>>>>>>>
import org.opentripplanner.graph_builder.module.StreetLinkerModule; |
<<<<<<<
import org.opentripplanner.routing.RoutingService;
import org.opentripplanner.routing.graph.Vertex;
=======
>>>>>>>
import org.opentripplanner.routing.RoutingService;
<<<<<<<
private String getLocationOfFirstQuay(String vertexId, RoutingService routingService) {
Vertex vertex = routingService.getStopVertexForStop().get(vertexId);
=======
/*
private String getLocationOfFirstQuay(String vertexId, GraphIndex graphIndex) {
// TODO THIS DOES NOT WORK !!
Vertex vertex = graphIndex.stopVertexForStop.get(vertexId);
>>>>>>>
/*
private String getLocationOfFirstQuay(String vertexId, GraphIndex graphIndex) {
// TODO THIS DOES NOT WORK !!
Vertex vertex = graphIndex.stopVertexForStop.get(vertexId); |
<<<<<<<
import org.opentripplanner.routing.core.Vertex;
=======
import org.opentripplanner.routing.core.GraphVertex;
import org.opentripplanner.routing.core.TraverseMode;
import org.opentripplanner.routing.edgetype.PatternHop;
>>>>>>>
import org.opentripplanner.routing.core.Vertex;
import org.opentripplanner.routing.core.TraverseMode;
import org.opentripplanner.routing.edgetype.PatternHop;
<<<<<<<
for (Vertex gv : graph.getVertices()) {
Coordinate c = gv.getCoordinate();
=======
for (GraphVertex gv : graph.getVertices()) {
for (Edge e: gv.getOutgoing()) {
if (e instanceof PatternHop) {
transitModes.add(((PatternHop) e).getMode());
}
}
Coordinate c = gv.vertex.getCoordinate();
>>>>>>>
for (Vertex v : graph.getVertices()) {
for (Edge e: v.getOutgoing()) {
if (e instanceof PatternHop) {
transitModes.add(((PatternHop) e).getMode());
}
}
Coordinate c = v.getCoordinate(); |
<<<<<<<
=======
import org.geotools.geometry.Envelope2D;
>>>>>>>
import org.geotools.geometry.Envelope2D;
<<<<<<<
import org.opentripplanner.gbannotation.*;
=======
import org.opentripplanner.gbannotation.BikeRentalStationUnlinked;
import org.opentripplanner.gbannotation.ConflictingBikeTags;
import org.opentripplanner.gbannotation.GraphBuilderAnnotation;
import org.opentripplanner.gbannotation.Graphwide;
import org.opentripplanner.gbannotation.LevelAmbiguous;
import org.opentripplanner.gbannotation.ParkAndRideUnlinked;
import org.opentripplanner.gbannotation.StreetCarSpeedZero;
import org.opentripplanner.gbannotation.TurnRestrictionBad;
import org.opentripplanner.gbannotation.TurnRestrictionException;
import org.opentripplanner.gbannotation.TurnRestrictionUnknown;
>>>>>>>
import org.opentripplanner.gbannotation.ConflictingBikeTags;
import org.opentripplanner.gbannotation.Graphwide;
import org.opentripplanner.gbannotation.LevelAmbiguous;
import org.opentripplanner.gbannotation.ParkAndRideUnlinked;
import org.opentripplanner.gbannotation.StreetCarSpeedZero;
import org.opentripplanner.gbannotation.TurnRestrictionBad;
import org.opentripplanner.gbannotation.TurnRestrictionException;
import org.opentripplanner.gbannotation.TurnRestrictionUnknown;
<<<<<<<
import org.opentripplanner.routing.edgetype.*;
=======
import org.opentripplanner.routing.edgetype.AreaEdge;
import org.opentripplanner.routing.edgetype.AreaEdgeList;
import org.opentripplanner.routing.edgetype.ElevatorAlightEdge;
import org.opentripplanner.routing.edgetype.ElevatorBoardEdge;
import org.opentripplanner.routing.edgetype.ElevatorHopEdge;
import org.opentripplanner.routing.edgetype.FreeEdge;
import org.opentripplanner.routing.edgetype.NamedArea;
import org.opentripplanner.routing.edgetype.ParkAndRideEdge;
import org.opentripplanner.routing.edgetype.ParkAndRideLinkEdge;
import org.opentripplanner.routing.edgetype.PlainStreetEdge;
import org.opentripplanner.routing.edgetype.RentABikeOffEdge;
import org.opentripplanner.routing.edgetype.RentABikeOnEdge;
import org.opentripplanner.routing.edgetype.StreetEdge;
import org.opentripplanner.routing.edgetype.StreetTraversalPermission;
>>>>>>>
import org.opentripplanner.routing.edgetype.AreaEdge;
import org.opentripplanner.routing.edgetype.AreaEdgeList;
import org.opentripplanner.routing.edgetype.ElevatorAlightEdge;
import org.opentripplanner.routing.edgetype.ElevatorBoardEdge;
import org.opentripplanner.routing.edgetype.ElevatorHopEdge;
import org.opentripplanner.routing.edgetype.FreeEdge;
import org.opentripplanner.routing.edgetype.NamedArea;
import org.opentripplanner.routing.edgetype.ParkAndRideEdge;
import org.opentripplanner.routing.edgetype.ParkAndRideLinkEdge;
import org.opentripplanner.routing.edgetype.PlainStreetEdge;
import org.opentripplanner.routing.edgetype.RentABikeOffEdge;
import org.opentripplanner.routing.edgetype.RentABikeOnEdge;
import org.opentripplanner.routing.edgetype.StreetEdge;
import org.opentripplanner.routing.edgetype.StreetTraversalPermission;
<<<<<<<
import org.opentripplanner.routing.vertextype.*;
=======
import org.opentripplanner.routing.vertextype.BikeRentalStationVertex;
import org.opentripplanner.routing.vertextype.ElevatorOffboardVertex;
import org.opentripplanner.routing.vertextype.ElevatorOnboardVertex;
import org.opentripplanner.routing.vertextype.ExitVertex;
import org.opentripplanner.routing.vertextype.IntersectionVertex;
import org.opentripplanner.routing.vertextype.ParkAndRideVertex;
>>>>>>>
import org.opentripplanner.routing.vertextype.BikeRentalStationVertex;
import org.opentripplanner.routing.vertextype.ElevatorOffboardVertex;
import org.opentripplanner.routing.vertextype.ElevatorOnboardVertex;
import org.opentripplanner.routing.vertextype.ExitVertex;
import org.opentripplanner.routing.vertextype.IntersectionVertex;
import org.opentripplanner.routing.vertextype.ParkAndRideVertex;
<<<<<<<
import java.util.*;
=======
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiLineString;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
>>>>>>>
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiLineString;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
<<<<<<<
StreetTraversalPermission areaPermissions = getPermissionsForEntity(way,
StreetTraversalPermission.PEDESTRIAN_AND_BICYCLE);
if (areaPermissions == StreetTraversalPermission.NONE)
continue;
_areas.add(new Area(way, Arrays.asList(way), Collections.<OSMWay>emptyList()));
=======
newArea(new Area(way, Arrays.asList(way), Collections.<OSMWay> emptyList()));
>>>>>>>
newArea(new Area(way, Arrays.asList(way), Collections.<OSMWay> emptyList())); |
<<<<<<<
import org.opentripplanner.routing.error.VertexNotFoundException;
import org.opentripplanner.routing.request.RoutingRequest;
=======
import org.opentripplanner.routing.api.request.RoutingRequest;
import org.opentripplanner.routing.error.RoutingValidationException;
>>>>>>>
import org.opentripplanner.routing.api.request.RoutingRequest;
import org.opentripplanner.routing.error.RoutingValidationException;
<<<<<<<
=======
setResponseMetadata(requestTransitDataProvider, transitResponse);
checkIfTransitConnectionExists(transitResponse);
>>>>>>>
checkIfTransitConnectionExists(transitResponse); |
<<<<<<<
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
=======
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicHeader;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
>>>>>>>
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeader;
<<<<<<<
static final HttpClient httpClient;
static {
PoolingHttpClientConnectionManager mgr = new PoolingHttpClientConnectionManager();
mgr.setDefaultMaxPerRoute(20);
int timeout = 10 * 1000;
SocketConfig cfg = SocketConfig.custom()
.setSoTimeout(timeout)
.build();
mgr.setDefaultSocketConfig(cfg);
httpClient = HttpClients.custom()
.setConnectionManager(mgr)
.build();
}
=======
private final String workerId = UUID.randomUUID().toString().replace("-", ""); // a unique identifier for each worker so the broker can catalog them
DefaultHttpClient httpClient = new DefaultHttpClient();
>>>>>>>
static final HttpClient httpClient;
static {
PoolingHttpClientConnectionManager mgr = new PoolingHttpClientConnectionManager();
mgr.setDefaultMaxPerRoute(20);
int timeout = 10 * 1000;
SocketConfig cfg = SocketConfig.custom()
.setSoTimeout(timeout)
.build();
mgr.setDefaultSocketConfig(cfg);
httpClient = HttpClients.custom()
.setConnectionManager(mgr)
.build();
}
private final String workerId = UUID.randomUUID().toString().replace("-", ""); // a unique identifier for each worker so the broker can catalog them
<<<<<<<
// Run a POST request (long-polling for work) indicating which graph this worker prefers to work on
String url = BROKER_BASE_URL + "/dequeue/" + graphId;
HttpPost httpPost = new HttpPost(url);
=======
// Run a GET request (long-polling for work) indicating which graph this worker prefers to work on
String url = BROKER_BASE_URL + "/" + graphId;
HttpGet httpGet = new HttpGet(url);
httpGet.setHeader(new BasicHeader(WORKER_ID_HEADER, workerId));
>>>>>>>
// Run a POST request (long-polling for work) indicating which graph this worker prefers to work on
String url = BROKER_BASE_URL + "/dequeue/" + graphId;
HttpPost httpPost = new HttpPost(url);
httpPost.setHeader(new BasicHeader(WORKER_ID_HEADER, workerId)); |
<<<<<<<
BICYCLE_AND_DRIVING(8 | 4 | 2),
PEDESTRIAN_AND_DRIVING(8 | 4 | 1),
=======
PEDESTRIAN_CAR_MOTOR(8 | 4 | 1),
BICYCLE_CAR_MOTOR(8 | 4 | 2),
>>>>>>>
PEDESTRIAN_AND_DRIVING(8 | 4 | 1),
BICYCLE_AND_DRIVING(8 | 4 | 2), |
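The entry above renames constants in a bitmask street-traversal permission enum. As a rough illustration of how such a bitmask enum works, here is a hedged sketch; the flag meanings (1 = pedestrian, 2 = bicycle, 4 and 8 = motor-vehicle bits) are inferred from the constant names and values shown, and the allows() helper is an assumption of this sketch rather than the project's API.

// Hedged sketch of a bitmask permission enum in the style of the entry above.
public enum TraversalPermission {
    NONE(0),
    PEDESTRIAN(1),
    BICYCLE(2),
    PEDESTRIAN_AND_BICYCLE(2 | 1),
    PEDESTRIAN_AND_DRIVING(8 | 4 | 1),
    BICYCLE_AND_DRIVING(8 | 4 | 2),
    ALL(8 | 4 | 2 | 1);

    public final int code;

    TraversalPermission(int code) {
        this.code = code;
    }

    /** True if every bit of the other permission is contained in this one. */
    public boolean allows(TraversalPermission other) {
        return (code & other.code) == other.code;
    }
}

For example, PEDESTRIAN_AND_DRIVING.allows(PEDESTRIAN) is true, while BICYCLE_AND_DRIVING.allows(PEDESTRIAN) is false.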
<<<<<<<
* The traverse mode for all trips in this pattern.
*/
public final TransitMode mode;
/**
=======
>>>>>>>
<<<<<<<
this.mode = GtfsLibrary.getTransitMode(this.route);
=======
>>>>>>> |
<<<<<<<
s1.addAlerts(getNotes());
if (this.toll && traverseMode == TraverseMode.CAR)
s1.addAlert(Alert.createSimpleAlerts("Toll road"));
=======
s1.addAlerts(notes);
>>>>>>>
s1.addAlerts(notes);
if (this.toll && traverseMode == TraverseMode.CAR)
s1.addAlert(Alert.createSimpleAlerts("Toll road")); |
<<<<<<<
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.CoordinateSequenceFactory;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
=======
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.TimeZone;
>>>>>>>
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.CoordinateSequenceFactory;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.TimeZone; |
<<<<<<<
try {
State[] states = transferStates.toArray(new State[0]);
GraphPath graphPath = new GraphPath(states[states.length - 1], false);
Itinerary subItinerary = GraphPathToItineraryMapper
.generateItinerary(graphPath, request.locale);
// TODO OTP2 We use the duration initially calculated for use during routing
// because they do not always match up and we risk getting negative wait times
// (#2955)
if (subItinerary.legs.size() != 1) {
throw new IllegalArgumentException("Sub itineraries should only contain one leg.");
}
subItinerary.legs.get(0).startTime = createCalendar(pathLeg.fromTime());
subItinerary.legs.get(0).endTime = createCalendar(pathLeg.toTime());
if (!onlyIfNonZeroDistance || subItinerary.nonTransitDistanceMeters > 0) {
legs.addAll(subItinerary.legs);
}
} catch (TrivialPathException e) {
// Ignore, no legs need be copied
=======
State[] states = transferStates.toArray(new State[0]);
GraphPath graphPath = new GraphPath(states[states.length - 1], false);
Itinerary subItinerary = GraphPathToItineraryMapper
.generateItinerary(graphPath, false, true, request.locale);
// TODO OTP2 We use the duration initially calculated for use during routing
// because they do not always match up and we risk getting negative wait times
// (#2955)
if (subItinerary.legs.size() != 1) {
throw new IllegalArgumentException("Sub itineraries should only contain one leg.");
}
subItinerary.legs.get(0).startTime = createCalendar(pathLeg.fromTime());
subItinerary.legs.get(0).endTime = createCalendar(pathLeg.toTime());
if (!onlyIfNonZeroDistance || subItinerary.nonTransitDistanceMeters > 0) {
legs.addAll(subItinerary.legs);
>>>>>>>
State[] states = transferStates.toArray(new State[0]);
GraphPath graphPath = new GraphPath(states[states.length - 1], false);
Itinerary subItinerary = GraphPathToItineraryMapper
.generateItinerary(graphPath, request.locale);
// TODO OTP2 We use the duration initially calculated for use during routing
// because they do not always match up and we risk getting negative wait times
// (#2955)
if (subItinerary.legs.size() != 1) {
throw new IllegalArgumentException("Sub itineraries should only contain one leg.");
}
subItinerary.legs.get(0).startTime = createCalendar(pathLeg.fromTime());
subItinerary.legs.get(0).endTime = createCalendar(pathLeg.toTime());
if (!onlyIfNonZeroDistance || subItinerary.nonTransitDistanceMeters > 0) {
legs.addAll(subItinerary.legs); |
<<<<<<<
s1.incrementWeight(weight);
if (walkingBike || TraverseMode.BICYCLE.equals(traverseMode)) {
if (!(backWalkingBike || TraverseMode.BICYCLE.equals(backMode))) {
s1.incrementTimeInSeconds(options.bikeSwitchTime);
s1.incrementWeight(options.bikeSwitchCost);
}
}
=======
>>>>>>>
if (walkingBike || TraverseMode.BICYCLE.equals(traverseMode)) {
if (!(backWalkingBike || TraverseMode.BICYCLE.equals(backMode))) {
s1.incrementTimeInSeconds(options.bikeSwitchTime);
s1.incrementWeight(options.bikeSwitchCost);
}
} |
<<<<<<<
=======
import org.opentripplanner.routing.core.TraverseModeSet;
import org.opentripplanner.routing.edgetype.ElevatorAlightEdge;
import org.opentripplanner.routing.edgetype.ElevatorBoardEdge;
import org.opentripplanner.routing.edgetype.ElevatorHopEdge;
>>>>>>>
import org.opentripplanner.routing.edgetype.ElevatorAlightEdge;
import org.opentripplanner.routing.edgetype.ElevatorBoardEdge;
import org.opentripplanner.routing.edgetype.ElevatorHopEdge;
<<<<<<<
import org.opentripplanner.model.TransitMode;
=======
import org.opentripplanner.routing.vertextype.ElevatorOffboardVertex;
import org.opentripplanner.routing.vertextype.ElevatorOnboardVertex;
import org.opentripplanner.routing.vertextype.TransitBoardingAreaVertex;
import org.opentripplanner.routing.vertextype.TransitEntranceVertex;
import org.opentripplanner.routing.vertextype.TransitPathwayNodeVertex;
>>>>>>>
import org.opentripplanner.model.TransitMode;
import org.opentripplanner.routing.vertextype.ElevatorOffboardVertex;
import org.opentripplanner.routing.vertextype.ElevatorOnboardVertex;
import org.opentripplanner.routing.vertextype.TransitBoardingAreaVertex;
import org.opentripplanner.routing.vertextype.TransitEntranceVertex;
import org.opentripplanner.routing.vertextype.TransitPathwayNodeVertex; |
<<<<<<<
import org.opentripplanner.model.calendar.CalendarServiceData;
import org.opentripplanner.routing.algorithm.AStar;
=======
import org.opentripplanner.routing.algorithm.astar.AStar;
>>>>>>>
import org.opentripplanner.model.calendar.CalendarServiceData;
import org.opentripplanner.routing.algorithm.astar.AStar; |
<<<<<<<
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.List;
=======
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.LineString;
import java.util.Locale;
import org.opentripplanner.util.I18NString;
import org.opentripplanner.util.NonLocalizedString;
>>>>>>>
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Locale; |
<<<<<<<
import com.google.common.collect.Lists;
=======
>>>>>>>
<<<<<<<
import org.joda.time.DateTimeZone;
=======
import org.onebusaway.gtfs.model.Route;
>>>>>>>
import java.util.Map;
import org.joda.time.DateTimeZone;
import org.onebusaway.gtfs.model.Route;
<<<<<<<
LOG.info("Found {} initial stops", accessTimes.size());
=======
LOG.info("Found {} initial transit stops", accessTimes.size());
/** A compacted tabular representation of all the patterns that are running on this date in this time window. */
>>>>>>>
LOG.info("Found {} initial transit stops", accessTimes.size());
/** A compacted tabular representation of all the patterns that are running on this date in this time window. */
<<<<<<<
//LOG.info("Finished RAPTOR search in {} milliseconds", System.currentTimeMillis() - roundStartTime);
// loop over all states, accumulating mins, maxes, etc.
for (TObjectIntIterator<TransitStop> it = raptor.iterator(); it.hasNext();) {
it.advance();
int et = it.value() - startTime;
// this can happen if the time is left from a previous search at a later start time
/*if (et > 120 * 60)
continue;*/
TransitStop v = it.key();
if (et < mins.get(v))
mins.put(v, et);
if (et > maxs.get(v))
maxs.put(v, et);
accumulator.putIfAbsent(v, 0);
counts.putIfAbsent(v, 0);
accumulator.adjustValue(v, et);
counts.increment(v);
=======
// Propagate minimum travel times out to vertices in the street network
StopTreeCache stopTreeCache = graph.index.getStopTreeCache();
TObjectIntIterator<TransitStop> resultIterator = raptor.iterator();
int[] minsPerVertex = new int[Vertex.getMaxIndex()];
while (resultIterator.hasNext()) {
resultIterator.advance();
TransitStop transitStop = resultIterator.key();
int arrivalTime = resultIterator.value();
if (arrivalTime == Integer.MAX_VALUE) continue; // stop was not reached in this round (why was it included in map?)
int elapsedTime = arrivalTime - departureTime;
stopTreeCache.propagateStop(transitStop, elapsedTime, request.walkSpeed, minsPerVertex);
>>>>>>>
// Propagate minimum travel times out to vertices in the street network
StopTreeCache stopTreeCache = graph.index.getStopTreeCache();
TObjectIntIterator<TransitStop> resultIterator = raptor.iterator();
int[] minsPerVertex = new int[Vertex.getMaxIndex()];
while (resultIterator.hasNext()) {
resultIterator.advance();
TransitStop transitStop = resultIterator.key();
int arrivalTime = resultIterator.value();
if (arrivalTime == Integer.MAX_VALUE) continue; // stop was not reached in this round (why was it included in map?)
int elapsedTime = arrivalTime - departureTime;
stopTreeCache.propagateStop(transitStop, elapsedTime, request.walkSpeed, minsPerVertex); |
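The resolution above keeps the newer approach of propagating each reached stop's elapsed time out to street vertices. A minimal sketch of that min-propagation idea, using plain arrays as hypothetical stand-ins for OTP's StopTreeCache and vertex index:
import java.util.Arrays;

// Sketch only: spreads each reached stop's elapsed time to nearby street vertices as
// elapsed + walkDistance / walkSpeed, keeping the minimum per vertex. The array-based
// inputs are illustrative, not OTP's actual StopTreeCache API.
public class MinPropagationSketch {
    static int[] propagate(int[] stopElapsedSeconds, int[][] nearbyVertex,
                           double[][] nearbyDistanceMeters, double walkSpeed, int vertexCount) {
        int[] best = new int[vertexCount];
        Arrays.fill(best, Integer.MAX_VALUE);
        for (int s = 0; s < stopElapsedSeconds.length; s++) {
            if (stopElapsedSeconds[s] == Integer.MAX_VALUE) continue; // stop never reached
            for (int i = 0; i < nearbyVertex[s].length; i++) {
                int v = nearbyVertex[s][i];
                int t = stopElapsedSeconds[s] + (int) (nearbyDistanceMeters[s][i] / walkSpeed);
                if (t < best[v]) best[v] = t;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        int[] elapsed = { 600, Integer.MAX_VALUE };          // second stop was never reached
        int[][] vertices = { { 0, 1 }, { 2 } };
        double[][] distances = { { 50.0, 200.0 }, { 10.0 } };
        System.out.println(Arrays.toString(propagate(elapsed, vertices, distances, 1.33, 3)));
    }
}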
<<<<<<<
this.stateData.opt = opt;
this.stateData.startTime = time;
this.stateData.tripSeqHash = 0;
this.time = time;
=======
stateData.options = opt;
stateData.startTime = time;
stateData.tripSeqHash = 0;
stateData.usingRentedBike = false;
// System.out.printf("new state %d %s %s \n", this.time, this.vertex, stateData.options);
}
public State createState(long time, Vertex vertex, TraverseOptions options) {
return new State(time, vertex, options);
>>>>>>>
this.stateData.opt = options;
this.stateData.startTime = time;
this.stateData.tripSeqHash = 0;
this.stateData.usingRentedBike = false;
this.time = time;
<<<<<<<
public TraverseOptions getOptions () {
return stateData.opt;
}
=======
public TraverseMode getNonTransitMode(TraverseOptions options) {
TraverseModeSet modes = options.getModes();
if (modes.getCar())
return TraverseMode.CAR;
if (modes.getWalk() && !isBikeRenting())
return TraverseMode.WALK;
if (modes.getBicycle())
return TraverseMode.BICYCLE;
return null;
}
>>>>>>>
public TraverseOptions getOptions () {
return stateData.opt;
}
public TraverseMode getNonTransitMode(TraverseOptions options) {
TraverseModeSet modes = options.getModes();
if (modes.getCar())
return TraverseMode.CAR;
if (modes.getWalk() && !isBikeRenting())
return TraverseMode.WALK;
if (modes.getBicycle())
return TraverseMode.BICYCLE;
return null;
} |
<<<<<<<
=======
import com.google.gerrit.reviewdb.client.BooleanProjectConfig;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
>>>>>>> |
<<<<<<<
long size, long sizeDelta) {
final FileMode oldMode = fileHeader.getOldMode();
final FileMode newMode = fileHeader.getNewMode();
if (oldMode == FileMode.GITLINK || newMode == FileMode.GITLINK) {
return new PatchListEntry(fileHeader, Collections.<Edit> emptyList(),
size, sizeDelta);
}
=======
long sizeDelta) {
>>>>>>>
long size, long sizeDelta) { |
<<<<<<<
loader.buildGraph(_graph, extra);
=======
loader.buildGraph(graph, extra);
// Need to set up the index because buildGraph doesn't do it.
graph.rebuildVertexAndEdgeIndices();
>>>>>>>
loader.buildGraph(graph, extra); |
<<<<<<<
options.speed = 1.0;
options.setRoutingContext(_graph, _graph.getVertex("56th_24th"), _graph.getVertex("leary_20th"));
ShortestPathTree tree = new GenericAStar().getShortestPathTree(options);
=======
options.setWalkSpeed(1.0);
ShortestPathTree tree = AStar.getShortestPathTree(_graph, "56th_24th", "leary_20th",
0, options);
>>>>>>>
options.setWalkSpeed(1.0);
options.setRoutingContext(_graph, _graph.getVertex("56th_24th"), _graph.getVertex("leary_20th"));
ShortestPathTree tree = new GenericAStar().getShortestPathTree(options); |
<<<<<<<
/**
     * Checks the permissions of the street edge to see whether the specified modes are allowed to travel on it.
     *
     * Barriers aren't taken into account, so canTraverse can return true while doTraverse
     * returns false because there are barriers on the street.
     *
     * This is because this function is also used on streets when searching for the start/stop location.
     * Those streets are then split; on a split street it may be possible to drive with a CAR because
     * it is only blocked from one direction.
* @param modes
* @return
*/
=======
//For testing only
public StreetEdge(StreetVertex v1, StreetVertex v2, LineString geometry,
String name, double length,
StreetTraversalPermission permission, boolean back) {
this(v1, v2, geometry, new NonLocalizedString(name), length, permission, back);
}
public boolean canTraverse(RoutingRequest options) {
if (options.wheelchairAccessible) {
if (!isWheelchairAccessible()) {
return false;
}
if (getMaxSlope() > options.maxSlope) {
return false;
}
}
return canTraverse(options.modes);
}
>>>>>>>
//For testing only
public StreetEdge(StreetVertex v1, StreetVertex v2, LineString geometry,
String name, double length,
StreetTraversalPermission permission, boolean back) {
this(v1, v2, geometry, new NonLocalizedString(name), length, permission, back);
}
/**
     * Checks the permissions of the street edge to see whether the specified modes are allowed to travel on it.
     *
     * Barriers aren't taken into account, so canTraverse can return true while doTraverse
     * returns false because there are barriers on the street.
     *
     * This is because this function is also used on streets when searching for the start/stop location.
     * Those streets are then split; on a split street it may be possible to drive with a CAR because
     * it is only blocked from one direction.
* @param modes
* @return
*/ |
<<<<<<<
private static final long serialVersionUID = 1L;
public RentABikeAbstractEdge(Vertex from, Vertex to) {
super(from, to);
}
protected State traverseRent(State s0) {
RoutingRequest options = s0.getOptions();
/*
* If we already have a bike (rented or own) we won't go any faster by having a second one.
*/
if (!s0.getNonTransitMode(options).equals(TraverseMode.WALK))
return null;
/*
* To rent a bike, we need to have BICYCLE in allowed modes.
*/
if (!options.getModes().contains(TraverseMode.BICYCLE))
return null;
BikeRentalStationVertex dropoff = (BikeRentalStationVertex) tov;
if (options.isUseBikeRentalAvailabilityInformation() && dropoff.getBikesAvailable() == 0) {
return null;
}
EdgeNarrative en = new FixedModeEdge(this, s0.getNonTransitMode(options));
StateEditor s1 = s0.edit(this, en);
s1.incrementWeight(options.bikeRentalPickupCost);
s1.incrementTimeInSeconds(options.bikeRentalPickupTime);
s1.setBikeRenting(true);
State s1b = s1.makeState();
return s1b;
}
protected State traverseDropoff(State s0) {
RoutingRequest options = s0.getOptions();
/*
* To dropoff a bike, we need to have rented one.
*/
if (!s0.isBikeRenting())
return null;
BikeRentalStationVertex pickup = (BikeRentalStationVertex) tov;
if (options.isUseBikeRentalAvailabilityInformation() && pickup.getSpacesAvailable() == 0) {
return null;
}
EdgeNarrative en = new FixedModeEdge(this, s0.getNonTransitMode(options));
StateEditor s1e = s0.edit(this, en);
s1e.incrementWeight(options.bikeRentalDropoffCost);
s1e.incrementTimeInSeconds(options.bikeRentalDropoffTime);
s1e.setBikeRenting(false);
State s1 = s1e.makeState();
return s1;
}
@Override
public double getDistance() {
return 0;
}
@Override
public LineString getGeometry() {
return null;
}
@Override
public TraverseMode getMode() {
return TraverseMode.WALK;
}
@Override
public String getName() {
return getToVertex().getName();
}
@Override
public boolean hasBogusName() {
return false;
}
=======
private static final long serialVersionUID = 1L;
private String network;
public RentABikeAbstractEdge(Vertex from, Vertex to, String network) {
super(from, to);
this.network = network;
}
protected State traverseRent(State s0) {
RoutingRequest options = s0.getOptions();
/*
* If we already have a bike (rented or own) we won't go any faster by having a second one.
*/
if (!s0.getNonTransitMode(options).equals(TraverseMode.WALK))
return null;
/*
* To rent a bike, we need to have BICYCLE in allowed modes.
*/
if (!options.getModes().contains(TraverseMode.BICYCLE))
return null;
BikeRentalStationVertex dropoff = (BikeRentalStationVertex) tov;
if (options.isUseBikeRentalAvailabilityInformation() && dropoff.getBikesAvailable() == 0) {
return null;
}
EdgeNarrative en = new FixedModeEdge(this, s0.getNonTransitMode(options));
StateEditor s1 = s0.edit(this, en);
s1.incrementWeight(options.isArriveBy() ? options.bikeRentalDropoffCost
: options.bikeRentalPickupCost);
s1.incrementTimeInSeconds(options.isArriveBy() ? options.bikeRentalDropoffTime
: options.bikeRentalPickupTime);
s1.setBikeRenting(true);
s1.setBikeRentalNetwork(network);
State s1b = s1.makeState();
return s1b;
}
protected State traverseDropoff(State s0) {
RoutingRequest options = s0.getOptions();
/*
* To dropoff a bike, we need to have rented one.
*/
if (!s0.isBikeRenting() || !s0.getBikeRentalNetwork().equals(network))
return null;
BikeRentalStationVertex pickup = (BikeRentalStationVertex) tov;
if (options.isUseBikeRentalAvailabilityInformation() && pickup.getSpacesAvailable() == 0) {
return null;
}
EdgeNarrative en = new FixedModeEdge(this, s0.getNonTransitMode(options));
StateEditor s1e = s0.edit(this, en);
s1e.incrementWeight(options.isArriveBy() ? options.bikeRentalPickupCost
: options.bikeRentalDropoffCost);
s1e.incrementTimeInSeconds(options.isArriveBy() ? options.bikeRentalPickupTime
: options.bikeRentalDropoffTime);
s1e.setBikeRenting(false);
State s1 = s1e.makeState();
return s1;
}
@Override
public double getDistance() {
return 0;
}
@Override
public Geometry getGeometry() {
return null;
}
@Override
public TraverseMode getMode() {
return TraverseMode.WALK;
}
@Override
public String getName() {
return getToVertex().getName();
}
@Override
public boolean hasBogusName() {
return false;
}
>>>>>>>
private static final long serialVersionUID = 1L;
private String network;
public RentABikeAbstractEdge(Vertex from, Vertex to, String network) {
super(from, to);
this.network = network;
}
protected State traverseRent(State s0) {
RoutingRequest options = s0.getOptions();
/*
* If we already have a bike (rented or own) we won't go any faster by having a second one.
*/
if (!s0.getNonTransitMode(options).equals(TraverseMode.WALK))
return null;
/*
* To rent a bike, we need to have BICYCLE in allowed modes.
*/
if (!options.getModes().contains(TraverseMode.BICYCLE))
return null;
BikeRentalStationVertex dropoff = (BikeRentalStationVertex) tov;
if (options.isUseBikeRentalAvailabilityInformation() && dropoff.getBikesAvailable() == 0) {
return null;
}
EdgeNarrative en = new FixedModeEdge(this, s0.getNonTransitMode(options));
StateEditor s1 = s0.edit(this, en);
s1.incrementWeight(options.isArriveBy() ? options.bikeRentalDropoffCost
: options.bikeRentalPickupCost);
s1.incrementTimeInSeconds(options.isArriveBy() ? options.bikeRentalDropoffTime
: options.bikeRentalPickupTime);
s1.setBikeRenting(true);
s1.setBikeRentalNetwork(network);
State s1b = s1.makeState();
return s1b;
}
protected State traverseDropoff(State s0) {
RoutingRequest options = s0.getOptions();
/*
* To dropoff a bike, we need to have rented one.
*/
if (!s0.isBikeRenting() || !s0.getBikeRentalNetwork().equals(network))
return null;
BikeRentalStationVertex pickup = (BikeRentalStationVertex) tov;
if (options.isUseBikeRentalAvailabilityInformation() && pickup.getSpacesAvailable() == 0) {
return null;
}
EdgeNarrative en = new FixedModeEdge(this, s0.getNonTransitMode(options));
StateEditor s1e = s0.edit(this, en);
s1e.incrementWeight(options.isArriveBy() ? options.bikeRentalPickupCost
: options.bikeRentalDropoffCost);
s1e.incrementTimeInSeconds(options.isArriveBy() ? options.bikeRentalPickupTime
: options.bikeRentalDropoffTime);
s1e.setBikeRenting(false);
State s1 = s1e.makeState();
return s1;
}
@Override
public double getDistance() {
return 0;
}
@Override
public LineString getGeometry() {
return null;
}
@Override
public TraverseMode getMode() {
return TraverseMode.WALK;
}
@Override
public String getName() {
return getToVertex().getName();
}
@Override
public boolean hasBogusName() {
return false;
} |
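The resolution above takes the arrive-by-aware version of the rental edges: when the search runs backwards in time, picking up a bike is charged as a drop-off and vice versa, and the rental network is tracked on the state. A small self-contained sketch of just that cost swap, with hypothetical field names rather than OTP's RoutingRequest:
// Sketch only: mirrors the isArriveBy() cost/time swap used by traverseRent/traverseDropoff above.
// Field and method names here are illustrative, not OTP's actual API.
public class RentalCostSketch {
    final int pickupCost = 120;  // weight charged when picking a bike up
    final int dropoffCost = 30;  // weight charged when dropping it off
    final boolean arriveBy;      // true when the search runs backwards in time

    RentalCostSketch(boolean arriveBy) { this.arriveBy = arriveBy; }

    // At a "rent" edge, a backwards search is really undoing a drop-off, so the costs swap.
    int rentWeight()    { return arriveBy ? dropoffCost : pickupCost; }
    int dropoffWeight() { return arriveBy ? pickupCost  : dropoffCost; }

    public static void main(String[] args) {
        System.out.println(new RentalCostSketch(false).rentWeight()); // 120, forward search
        System.out.println(new RentalCostSketch(true).rentWeight());  // 30, arrive-by search
    }
}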
<<<<<<<
import org.opentripplanner.model.BikeRentalStationInfo;
=======
import org.opentripplanner.model.Agency;
import org.opentripplanner.model.Route;
import org.opentripplanner.model.Stop;
import org.opentripplanner.model.Trip;
import org.opentripplanner.profile.BikeRentalStationInfo;
>>>>>>>
import org.opentripplanner.model.BikeRentalStationInfo;
import org.opentripplanner.model.Agency;
import org.opentripplanner.model.Route;
import org.opentripplanner.model.Stop;
import org.opentripplanner.model.Trip; |
<<<<<<<
private static final long serialVersionUID = MavenVersion.VERSION.getUID();
private static final Logger LOG = LoggerFactory.getLogger(TraverseOptions.class);
private static final int CLAMP_ITINERARIES = 3;
private static final int CLAMP_TRANSFERS = 4;
=======
private static final long serialVersionUID = 1L;
private static final Logger _log = LoggerFactory.getLogger(TraverseOptions.class);
/** max speed along streets, in meters per second */
private double walkSpeed;
private double bikeSpeed;
private double carSpeed;
private TraverseModeSet modes;
public Calendar calendar;
private CalendarService calendarService;
private Map<AgencyAndId, Set<ServiceDate>> serviceDatesByServiceId = new HashMap<AgencyAndId, Set<ServiceDate>>();
private boolean back = false;
public boolean wheelchairAccessible = false;
public OptimizeType optimizeFor = OptimizeType.QUICK;
/**
* A maxWalkDistance of Double.MAX_VALUE indicates no limit
*/
double maxWalkDistance = Double.MAX_VALUE;
>>>>>>>
private static final long serialVersionUID = MavenVersion.VERSION.getUID();
private static final Logger LOG = LoggerFactory.getLogger(TraverseOptions.class);
private static final int CLAMP_ITINERARIES = 3;
private static final int CLAMP_TRANSFERS = 4;
<<<<<<<
walkingOptions.speed *= 0.3; //assume walking bikes is slow
walkingOptions.optimize = optimize;
=======
walkingOptions.walkSpeed *= 0.3; //assume walking bikes is slow
walkingOptions.optimizeFor = optimizeFor;
>>>>>>>
walkingOptions.walkSpeed *= 0.3; //assume walking bikes is slow
walkingOptions.optimize = optimize; |
<<<<<<<
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidClassException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.prefs.Preferences;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.*;
import gnu.trove.list.TDoubleList;
import gnu.trove.list.linked.TDoubleLinkedList;
import org.apache.commons.math3.stat.descriptive.rank.Median;
=======
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multiset;
import com.google.common.collect.Sets;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
>>>>>>>
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidClassException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.prefs.Preferences;
import gnu.trove.list.TDoubleList;
import gnu.trove.list.linked.TDoubleLinkedList;
import org.apache.commons.math3.stat.descriptive.rank.Median;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multiset;
import com.google.common.collect.Sets;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
<<<<<<<
/**
     * Calculates the transit center from the median of the coordinates of all transit stops, if the graph
     * has transit. If it doesn't, the center isn't calculated (the mean of the min and max latitude and longitude is used instead).
     *
     * The transit center is saved in the center variable.
     *
     * This speeds up the calculation, but the median needs to hold all of the latitudes/longitudes
     * in memory, which can become problematic in large installations. It works without a problem on New York State.
* @see GraphMetadata
*/
public void calculateTransitCenter() {
if (hasTransit) {
TDoubleList latitudes = new TDoubleLinkedList();
TDoubleList longitudes = new TDoubleLinkedList();
Median median = new Median();
getVertices().stream()
.filter(v -> v instanceof TransitStop)
.forEach(v -> {
latitudes.add(v.getLat());
longitudes.add(v.getLon());
});
median.setData(latitudes.toArray());
double medianLatitude = median.evaluate();
median = new Median();
median.setData(longitudes.toArray());
double medianLongitude = median.evaluate();
this.center = new Coordinate(medianLongitude, medianLatitude);
}
}
public Optional<Coordinate> getCenter() {
return Optional.ofNullable(center);
}
=======
>>>>>>>
/**
     * Calculates the transit center from the median of the coordinates of all transit stops, if the graph
     * has transit. If it doesn't, the center isn't calculated (the mean of the min and max latitude and longitude is used instead).
     *
     * The transit center is saved in the center variable.
     *
     * This speeds up the calculation, but the median needs to hold all of the latitudes/longitudes
     * in memory, which can become problematic in large installations. It works without a problem on New York State.
* @see GraphMetadata
*/
public void calculateTransitCenter() {
if (hasTransit) {
TDoubleList latitudes = new TDoubleLinkedList();
TDoubleList longitudes = new TDoubleLinkedList();
Median median = new Median();
getVertices().stream()
.filter(v -> v instanceof TransitStop)
.forEach(v -> {
latitudes.add(v.getLat());
longitudes.add(v.getLon());
});
median.setData(latitudes.toArray());
double medianLatitude = median.evaluate();
median = new Median();
median.setData(longitudes.toArray());
double medianLongitude = median.evaluate();
this.center = new Coordinate(medianLongitude, medianLatitude);
}
}
public Optional<Coordinate> getCenter() {
return Optional.ofNullable(center);
} |
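The calculateTransitCenter method kept above reduces to taking the per-axis median of all transit stop coordinates with commons-math. A standalone sketch of that computation on plain coordinate arrays (the inputs here are made up for illustration):
import org.apache.commons.math3.stat.descriptive.rank.Median;

// Sketch only: per-axis median of stop coordinates, as in calculateTransitCenter above.
public class MedianCenterSketch {
    static double[] center(double[] latitudes, double[] longitudes) {
        Median median = new Median();
        median.setData(latitudes);
        double medianLatitude = median.evaluate();
        median = new Median();
        median.setData(longitudes);
        double medianLongitude = median.evaluate();
        return new double[] { medianLongitude, medianLatitude }; // (x, y) order, like the Coordinate above
    }

    public static void main(String[] args) {
        double[] lats = { 40.71, 40.75, 40.68 };
        double[] lons = { -74.00, -73.98, -74.02 };
        double[] c = center(lats, lons);
        System.out.printf("center = (%f, %f)%n", c[0], c[1]);
    }
}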
<<<<<<<
=======
case MODIFIED:
applied = validateAndHandleModifiedTrip(graph, tripUpdate, feedId, serviceDate);
break;
>>>>>>>
case MODIFIED:
applied = validateAndHandleModifiedTrip(graph, tripUpdate, feedId, serviceDate);
break;
<<<<<<<
protected boolean handleCanceledTrip(TripUpdate tripUpdate, String agencyId,
ServiceDate serviceDate) {
TripDescriptor tripDescriptor = tripUpdate.getTrip();
String tripId = tripDescriptor.getTripId(); // This does not include Agency ID, trips are feed-unique.
TripPattern pattern = getPatternForTripId(tripId);
=======
/**
* Validate and handle GTFS-RT TripUpdate message containing a MODIFIED trip.
*
* @param graph graph to update
* @param tripUpdate GTFS-RT TripUpdate message
* @param feedId
* @param serviceDate
* @return true iff successful
*/
private boolean validateAndHandleModifiedTrip(Graph graph, TripUpdate tripUpdate, String feedId, ServiceDate serviceDate) {
// Preconditions
Preconditions.checkNotNull(graph);
Preconditions.checkNotNull(tripUpdate);
Preconditions.checkNotNull(serviceDate);
//
// Validate modified trip
//
// Check whether trip id of MODIFIED trip is available
TripDescriptor tripDescriptor = tripUpdate.getTrip();
if (!tripDescriptor.hasTripId()) {
LOG.warn("No trip id found for MODIFIED trip, skipping.");
return false;
}
// Check whether trip id already exists in graph
String tripId = tripDescriptor.getTripId();
Trip trip = getTripForTripId(tripId);
if (trip == null) {
// TODO: should we support this and consider it an ADDED trip?
LOG.warn("Graph does not contain trip id of MODIFIED trip, skipping.");
return false;
}
// Check whether a start date exists
if (!tripDescriptor.hasStartDate()) {
// TODO: should we support this and apply update to all days?
LOG.warn("MODIFIED trip doesn't have a start date in TripDescriptor, skipping.");
return false;
} else {
// Check whether service date is served by trip
Set<AgencyAndId> serviceIds = graph.getCalendarService().getServiceIdsOnDate(serviceDate);
if (!serviceIds.contains(trip.getServiceId())) {
// TODO: should we support this and change service id of trip?
LOG.warn("MODIFIED trip has a service date that is not served by trip, skipping.");
return false;
}
}
// Check whether at least two stop updates exist
if (tripUpdate.getStopTimeUpdateCount() < 2) {
LOG.warn("MODIFIED trip has less then two stops, skipping.");
return false;
}
// Check whether all stop times are available and all stops exist
List<Stop> stops = checkNewStopTimeUpdatesAndFindStops(tripUpdate);
if (stops == null) {
return false;
}
//
// Handle modified trip
//
boolean success = handleModifiedTrip(graph, trip, tripUpdate, stops, feedId, serviceDate);
return success;
}
/**
* Handle GTFS-RT TripUpdate message containing a MODIFIED trip.
*
* @param graph graph to update
* @param trip trip that is modified
* @param tripUpdate GTFS-RT TripUpdate message
* @param stops the stops of each StopTimeUpdate in the TripUpdate message
* @param feedId
* @param serviceDate service date for modified trip
* @return true iff successful
*/
private boolean handleModifiedTrip(Graph graph, Trip trip, TripUpdate tripUpdate, List<Stop> stops,
String feedId, ServiceDate serviceDate) {
// Preconditions
Preconditions.checkNotNull(stops);
Preconditions.checkArgument(tripUpdate.getStopTimeUpdateCount() == stops.size(),
"number of stop should match the number of stop time updates");
// Cancel scheduled trip
String tripId = tripUpdate.getTrip().getTripId();
cancelScheduledTrip(tripId, serviceDate);
// Check whether trip id has been used for previously ADDED/MODIFIED trip message and cancel
// previously created trip
cancelPreviouslyAddedTrip(tripId, serviceDate);
// Add new trip
boolean success = addTripToGraphAndBuffer(graph, trip, tripUpdate, stops, serviceDate);
return success;
}
>>>>>>>
/**
* Validate and handle GTFS-RT TripUpdate message containing a MODIFIED trip.
*
* @param graph graph to update
* @param tripUpdate GTFS-RT TripUpdate message
* @param feedId
* @param serviceDate
* @return true iff successful
*/
private boolean validateAndHandleModifiedTrip(Graph graph, TripUpdate tripUpdate, String feedId, ServiceDate serviceDate) {
// Preconditions
Preconditions.checkNotNull(graph);
Preconditions.checkNotNull(tripUpdate);
Preconditions.checkNotNull(serviceDate);
//
// Validate modified trip
//
// Check whether trip id of MODIFIED trip is available
TripDescriptor tripDescriptor = tripUpdate.getTrip();
if (!tripDescriptor.hasTripId()) {
LOG.warn("No trip id found for MODIFIED trip, skipping.");
return false;
}
// Check whether trip id already exists in graph
String tripId = tripDescriptor.getTripId();
Trip trip = getTripForTripId(tripId);
if (trip == null) {
// TODO: should we support this and consider it an ADDED trip?
LOG.warn("Graph does not contain trip id of MODIFIED trip, skipping.");
return false;
}
// Check whether a start date exists
if (!tripDescriptor.hasStartDate()) {
// TODO: should we support this and apply update to all days?
LOG.warn("MODIFIED trip doesn't have a start date in TripDescriptor, skipping.");
return false;
} else {
// Check whether service date is served by trip
Set<AgencyAndId> serviceIds = graph.getCalendarService().getServiceIdsOnDate(serviceDate);
if (!serviceIds.contains(trip.getServiceId())) {
// TODO: should we support this and change service id of trip?
LOG.warn("MODIFIED trip has a service date that is not served by trip, skipping.");
return false;
}
}
// Check whether at least two stop updates exist
if (tripUpdate.getStopTimeUpdateCount() < 2) {
LOG.warn("MODIFIED trip has less then two stops, skipping.");
return false;
}
// Check whether all stop times are available and all stops exist
List<Stop> stops = checkNewStopTimeUpdatesAndFindStops(tripUpdate);
if (stops == null) {
return false;
}
//
// Handle modified trip
//
boolean success = handleModifiedTrip(graph, trip, tripUpdate, stops, feedId, serviceDate);
return success;
}
/**
* Handle GTFS-RT TripUpdate message containing a MODIFIED trip.
*
* @param graph graph to update
* @param trip trip that is modified
* @param tripUpdate GTFS-RT TripUpdate message
* @param stops the stops of each StopTimeUpdate in the TripUpdate message
* @param feedId
* @param serviceDate service date for modified trip
* @return true iff successful
*/
private boolean handleModifiedTrip(Graph graph, Trip trip, TripUpdate tripUpdate, List<Stop> stops,
String feedId, ServiceDate serviceDate) {
// Preconditions
Preconditions.checkNotNull(stops);
Preconditions.checkArgument(tripUpdate.getStopTimeUpdateCount() == stops.size(),
"number of stop should match the number of stop time updates");
// Cancel scheduled trip
String tripId = tripUpdate.getTrip().getTripId();
cancelScheduledTrip(tripId, serviceDate);
// Check whether trip id has been used for previously ADDED/MODIFIED trip message and cancel
// previously created trip
cancelPreviouslyAddedTrip(tripId, serviceDate);
// Add new trip
boolean success = addTripToGraphAndBuffer(graph, trip, tripUpdate, stops, serviceDate);
return success;
} |
<<<<<<<
import com.google.common.collect.Sets;
import org.onebusaway.gtfs.model.AgencyAndId;
import org.onebusaway.gtfs.model.Route;
import org.onebusaway.gtfs.model.Trip;
=======
import org.opentripplanner.model.FeedScopedId;
import org.opentripplanner.model.Route;
import org.opentripplanner.model.Trip;
>>>>>>>
import com.google.common.collect.Sets;
import org.opentripplanner.model.FeedScopedId;
import org.opentripplanner.model.Route;
import org.opentripplanner.model.Trip; |
<<<<<<<
GtfsRelationalDao dao = context.getDao();
feedId = context.getFeedId().getId();
for (ShapePoint shapePoint : dao.getAllEntitiesForType(ShapePoint.class)) {
shapePoint.getShapeId().setAgencyId(feedId);
}
for (Route route : dao.getAllEntitiesForType(Route.class)) {
route.getId().setAgencyId(feedId);
}
for (Stop stop : dao.getAllEntitiesForType(Stop.class)) {
stop.getId().setAgencyId(feedId);
}
for (Trip trip : dao.getAllEntitiesForType(Trip.class)) {
trip.getId().setAgencyId(feedId);
}
for (ServiceCalendar serviceCalendar : dao.getAllEntitiesForType(ServiceCalendar.class)) {
serviceCalendar.getServiceId().setAgencyId(feedId);
}
for (ServiceCalendarDate serviceCalendarDate : dao.getAllEntitiesForType(ServiceCalendarDate.class)) {
serviceCalendarDate.getServiceId().setAgencyId(feedId);
}
for (FareAttribute fareAttribute : dao.getAllEntitiesForType(FareAttribute.class)) {
fareAttribute.getId().setAgencyId(feedId);
}
for (Pathway pathway : dao.getAllEntitiesForType(Pathway.class)) {
pathway.getId().setAgencyId(feedId);
}
GTFSPatternHopFactory factory = new GTFSPatternHopFactory(context);
=======
final GTFSPatternHopFactory factory = new GTFSPatternHopFactory(context);
>>>>>>>
GtfsRelationalDao dao = context.getDao();
feedId = context.getFeedId().getId();
for (ShapePoint shapePoint : dao.getAllEntitiesForType(ShapePoint.class)) {
shapePoint.getShapeId().setAgencyId(feedId);
}
for (Route route : dao.getAllEntitiesForType(Route.class)) {
route.getId().setAgencyId(feedId);
}
for (Stop stop : dao.getAllEntitiesForType(Stop.class)) {
stop.getId().setAgencyId(feedId);
}
for (Trip trip : dao.getAllEntitiesForType(Trip.class)) {
trip.getId().setAgencyId(feedId);
}
for (ServiceCalendar serviceCalendar : dao.getAllEntitiesForType(ServiceCalendar.class)) {
serviceCalendar.getServiceId().setAgencyId(feedId);
}
for (ServiceCalendarDate serviceCalendarDate : dao.getAllEntitiesForType(ServiceCalendarDate.class)) {
serviceCalendarDate.getServiceId().setAgencyId(feedId);
}
for (FareAttribute fareAttribute : dao.getAllEntitiesForType(FareAttribute.class)) {
fareAttribute.getId().setAgencyId(feedId);
}
for (Pathway pathway : dao.getAllEntitiesForType(Pathway.class)) {
pathway.getId().setAgencyId(feedId);
}
GTFSPatternHopFactory factory = new GTFSPatternHopFactory(context);
<<<<<<<
AgencyAndId tripId = new AgencyAndId(feedId, "1.1");
AgencyAndId tripId2 = new AgencyAndId(feedId, "1.2");
Trip trip = graph.index.tripForId.get(tripId);
TripPattern pattern = graph.index.patternForTrip.get(trip);
int tripIndex = pattern.scheduledTimetable.getTripIndex(tripId);
int tripIndex2 = pattern.scheduledTimetable.getTripIndex(tripId2);
=======
final AgencyAndId tripId = new AgencyAndId("agency", "1.1");
final AgencyAndId tripId2 = new AgencyAndId("agency", "1.2");
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern pattern = graph.index.patternForTrip.get(trip);
final int tripIndex = pattern.scheduledTimetable.getTripIndex(tripId);
final int tripIndex2 = pattern.scheduledTimetable.getTripIndex(tripId2);
>>>>>>>
final AgencyAndId tripId = new AgencyAndId(feedId, "1.1");
final AgencyAndId tripId2 = new AgencyAndId(feedId, "1.2");
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern pattern = graph.index.patternForTrip.get(trip);
final int tripIndex = pattern.scheduledTimetable.getTripIndex(tripId);
final int tripIndex2 = pattern.scheduledTimetable.getTripIndex(tripId2);
<<<<<<<
AgencyAndId tripId = new AgencyAndId(feedId, "1.1");
AgencyAndId tripId2 = new AgencyAndId(feedId, "1.2");
Trip trip = graph.index.tripForId.get(tripId);
TripPattern pattern = graph.index.patternForTrip.get(trip);
int tripIndex = pattern.scheduledTimetable.getTripIndex(tripId);
int tripIndex2 = pattern.scheduledTimetable.getTripIndex(tripId2);
=======
final AgencyAndId tripId = new AgencyAndId("agency", "1.1");
final AgencyAndId tripId2 = new AgencyAndId("agency", "1.2");
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern pattern = graph.index.patternForTrip.get(trip);
final int tripIndex = pattern.scheduledTimetable.getTripIndex(tripId);
final int tripIndex2 = pattern.scheduledTimetable.getTripIndex(tripId2);
>>>>>>>
final AgencyAndId tripId = new AgencyAndId(feedId, "1.1");
final AgencyAndId tripId2 = new AgencyAndId(feedId, "1.2");
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern pattern = graph.index.patternForTrip.get(trip);
final int tripIndex = pattern.scheduledTimetable.getTripIndex(tripId);
final int tripIndex2 = pattern.scheduledTimetable.getTripIndex(tripId2);
<<<<<<<
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), feedId);
=======
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), "agency");
>>>>>>>
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), feedId);
<<<<<<<
Stop stopA = graph.index.stopForId.get(new AgencyAndId(feedId, "A"));
TransitStopDepart transitStopDepartA = graph.index.stopVertexForStop.get(stopA).departVertex;
=======
final Stop stopA = graph.index.stopForId.get(new AgencyAndId("agency", "A"));
final TransitStopDepart transitStopDepartA = graph.index.stopVertexForStop.get(stopA).departVertex;
>>>>>>>
Stop stopA = graph.index.stopForId.get(new AgencyAndId(feedId, "A"));
TransitStopDepart transitStopDepartA = graph.index.stopVertexForStop.get(stopA).departVertex;
<<<<<<<
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), feedId);
=======
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), modifiedTripAgency);
>>>>>>>
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), feedId);
<<<<<<<
AgencyAndId tripId = new AgencyAndId(feedId, modifiedTripId);
Trip trip = graph.index.tripForId.get(tripId);
TripPattern originalTripPattern = graph.index.patternForTrip.get(trip);
Timetable originalTimetableForToday = snapshot.resolve(originalTripPattern, serviceDate);
Timetable originalTimetableScheduled = snapshot.resolve(originalTripPattern, null);
=======
final AgencyAndId tripId = new AgencyAndId(modifiedTripAgency, modifiedTripId);
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern originalTripPattern = graph.index.patternForTrip.get(trip);
final Timetable originalTimetableForToday = snapshot.resolve(originalTripPattern, serviceDate);
final Timetable originalTimetableScheduled = snapshot.resolve(originalTripPattern, null);
>>>>>>>
final AgencyAndId tripId = new AgencyAndId(feedId, modifiedTripId);
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern originalTripPattern = graph.index.patternForTrip.get(trip);
final Timetable originalTimetableForToday = snapshot.resolve(originalTripPattern, serviceDate);
final Timetable originalTimetableScheduled = snapshot.resolve(originalTripPattern, null);
<<<<<<<
TripPattern newTripPattern = snapshot.getLastAddedTripPattern(feedId, modifiedTripId, serviceDate);
=======
final TripPattern newTripPattern = snapshot.getLastAddedTripPattern(modifiedTripId, serviceDate);
>>>>>>>
final TripPattern newTripPattern = snapshot.getLastAddedTripPattern(feedId, modifiedTripId, serviceDate);
<<<<<<<
AgencyAndId tripId = new AgencyAndId(feedId, "1.1");
ServiceDate previously = serviceDate.previous().previous(); // Just to be safe...
Trip trip = graph.index.tripForId.get(tripId);
TripPattern pattern = graph.index.patternForTrip.get(trip);
=======
final AgencyAndId tripId = new AgencyAndId("agency", "1.1");
final ServiceDate previously = serviceDate.previous().previous(); // Just to be safe...
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern pattern = graph.index.patternForTrip.get(trip);
>>>>>>>
final AgencyAndId tripId = new AgencyAndId(feedId, "1.1");
final ServiceDate previously = serviceDate.previous().previous(); // Just to be safe...
final Trip trip = graph.index.tripForId.get(tripId);
final TripPattern pattern = graph.index.patternForTrip.get(trip);
<<<<<<<
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(TripUpdate.parseFrom(cancellation)), feedId);
TimetableSnapshot snapshotA = updater.getTimetableSnapshot();
=======
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(TripUpdate.parseFrom(cancellation)), "agency");
final TimetableSnapshot snapshotA = updater.getTimetableSnapshot();
>>>>>>>
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(TripUpdate.parseFrom(cancellation)), feedId);
final TimetableSnapshot snapshotA = updater.getTimetableSnapshot();
<<<<<<<
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), feedId);
TimetableSnapshot snapshotB = updater.getTimetableSnapshot();
=======
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), "agency");
final TimetableSnapshot snapshotB = updater.getTimetableSnapshot();
>>>>>>>
updater.applyTripUpdates(graph, fullDataset, Arrays.asList(tripUpdate), feedId);
final TimetableSnapshot snapshotB = updater.getTimetableSnapshot(); |
<<<<<<<
import org.opentripplanner.standalone.OTPServer;
import org.opentripplanner.standalone.Router;
=======
import org.opentripplanner.standalone.Router;
>>>>>>>
import org.opentripplanner.standalone.Router;
<<<<<<<
@Context
private OTPServer otpServer;
=======
>>>>>>>
<<<<<<<
ZSampleGrid<WTWD> sampleGrid = router.sampleGridRenderer.getSampleGrid(tgRequest, sptRequest);
=======
ZSampleGrid<WTWD> sampleGrid = router.sampleGridRenderer.getSampleGrid(
tgRequest, sptRequest);
>>>>>>>
ZSampleGrid<WTWD> sampleGrid = router.sampleGridRenderer.getSampleGrid(tgRequest, sptRequest);
<<<<<<<
String offRoadDistStr = String.format(Locale.US, "%f",
router.sampleGridRenderer.getOffRoadDistanceMeters(precisionMeters));
=======
String offRoadDistStr = String.format(Locale.US, "%f",
router.sampleGridRenderer
.getOffRoadDistanceMeters(precisionMeters));
>>>>>>>
String offRoadDistStr = String.format(Locale.US, "%f",
router.sampleGridRenderer.getOffRoadDistanceMeters(precisionMeters)); |
<<<<<<<
=======
import com.conveyal.kryo.TIntArrayListSerializer;
import com.conveyal.kryo.TIntIntHashMapSerializer;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.ExternalizableSerializer;
import com.esotericsoftware.kryo.serializers.JavaSerializer;
>>>>>>>
import com.conveyal.kryo.TIntArrayListSerializer;
import com.conveyal.kryo.TIntIntHashMapSerializer;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.ExternalizableSerializer;
import com.esotericsoftware.kryo.serializers.JavaSerializer;
<<<<<<<
import org.onebusaway.gtfs.impl.calendar.CalendarServiceImpl;
import org.onebusaway.gtfs.model.Agency;
import org.onebusaway.gtfs.model.AgencyAndId;
import org.onebusaway.gtfs.model.Stop;
import org.onebusaway.gtfs.model.FeedInfo;
import org.onebusaway.gtfs.model.calendar.CalendarServiceData;
import org.onebusaway.gtfs.model.calendar.ServiceDate;
import org.onebusaway.gtfs.services.calendar.CalendarService;
=======
import org.objenesis.strategy.SerializingInstantiatorStrategy;
import org.opentripplanner.calendar.impl.CalendarServiceImpl;
import org.opentripplanner.model.Agency;
import org.opentripplanner.model.FeedScopedId;
import org.opentripplanner.model.Stop;
import org.opentripplanner.model.FeedInfo;
import org.opentripplanner.model.calendar.CalendarServiceData;
import org.opentripplanner.model.calendar.ServiceDate;
import org.opentripplanner.model.CalendarService;
import org.opentripplanner.analyst.core.GeometryIndex;
import org.opentripplanner.analyst.request.SampleFactory;
>>>>>>>
import org.objenesis.strategy.SerializingInstantiatorStrategy;
import org.opentripplanner.calendar.impl.CalendarServiceImpl;
import org.opentripplanner.model.*;
<<<<<<<
private boolean debugData = true;
=======
// TODO this would be more efficient if it was just an array.
private transient Map<Integer, Vertex> vertexById;
private transient Map<Integer, Edge> edgeById;
>>>>>>>
<<<<<<<
public final Deduplicator deduplicator = new Deduplicator();
=======
private transient GeometryIndex geomIndex;
private transient SampleFactory sampleFactory;
public final transient Deduplicator deduplicator = new Deduplicator();
>>>>>>>
public final transient Deduplicator deduplicator = new Deduplicator();
<<<<<<<
out.writeObject(this);
out.writeObject(edges);
if (debugData) {
// should we make debug info generation conditional?
LOG.debug("Writing debug data...");
out.writeObject(this.graphBuilderAnnotations);
} else {
LOG.debug("Skipping debug data.");
}
=======
kryo.writeClassAndObject(output, this);
kryo.writeClassAndObject(output, edges);
output.close();
>>>>>>>
kryo.writeClassAndObject(output, this);
kryo.writeClassAndObject(output, edges);
output.close(); |
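The resolution above keeps the Kryo-based serialization (writeClassAndObject on the graph and its edges, then closing the output). A minimal round-trip with those same Kryo calls, on a throwaway list instead of a Graph; the class registrations and instantiator strategy the real code sets up are omitted:
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;

// Sketch only: same write/read calls as the graph serializer above, but without the
// explicit class registrations used by the real code.
public class KryoRoundTripSketch {
    public static void main(String[] args) {
        Kryo kryo = new Kryo();
        kryo.setRegistrationRequired(false); // the real serializer registers classes explicitly

        List<String> edges = new ArrayList<>();
        edges.add("A->B");

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        Output output = new Output(bytes);
        kryo.writeClassAndObject(output, edges);
        output.close();

        Input input = new Input(new ByteArrayInputStream(bytes.toByteArray()));
        @SuppressWarnings("unchecked")
        List<String> copy = (List<String>) kryo.readClassAndObject(input);
        input.close();
        System.out.println(copy);
    }
}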
<<<<<<<
import org.objenesis.strategy.SerializingInstantiatorStrategy;
import org.onebusaway.gtfs.impl.calendar.CalendarServiceImpl;
import org.onebusaway.gtfs.model.Agency;
import org.onebusaway.gtfs.model.AgencyAndId;
import org.onebusaway.gtfs.model.Stop;
import org.onebusaway.gtfs.model.FeedInfo;
import org.onebusaway.gtfs.model.calendar.CalendarServiceData;
import org.onebusaway.gtfs.model.calendar.ServiceDate;
import org.onebusaway.gtfs.services.calendar.CalendarService;
=======
import org.opentripplanner.calendar.impl.CalendarServiceImpl;
import org.opentripplanner.model.Agency;
import org.opentripplanner.model.FeedScopedId;
import org.opentripplanner.model.Stop;
import org.opentripplanner.model.FeedInfo;
import org.opentripplanner.model.calendar.CalendarServiceData;
import org.opentripplanner.model.calendar.ServiceDate;
import org.opentripplanner.model.CalendarService;
>>>>>>>
import org.objenesis.strategy.SerializingInstantiatorStrategy;
import org.opentripplanner.calendar.impl.CalendarServiceImpl;
import org.opentripplanner.model.Agency;
import org.opentripplanner.model.FeedScopedId;
import org.opentripplanner.model.Stop;
import org.opentripplanner.model.FeedInfo;
import org.opentripplanner.model.calendar.CalendarServiceData;
import org.opentripplanner.model.calendar.ServiceDate;
import org.opentripplanner.model.CalendarService; |
<<<<<<<
private int retryCount = 0;
private final String originalRequestorRef;
public SiriSXUpdater(Parameters config) {
=======
public SiriSXUpdater(SiriSXUpdaterParameters config) {
>>>>>>>
private int retryCount = 0;
private final String originalRequestorRef;
public SiriSXUpdater(SiriSXUpdaterParameters config) {
<<<<<<<
if (requestorRef == null || requestorRef.isEmpty()) {
requestorRef = "otp-"+UUID.randomUUID().toString();
}
//Keeping original requestorRef use as base for updated requestorRef to be used in retries
this.originalRequestorRef = requestorRef;
this.url = url;// + uniquenessParameter;
=======
>>>>>>> |
<<<<<<<
import java.io.File;
import java.util.Collection;
=======
import java.io.InputStream;
>>>>>>>
import java.util.Collection;
import java.io.InputStream;
<<<<<<<
=======
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
>>>>>>>
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
<<<<<<<
private File getFileForRouterId(String routerId) {
if (routerId.indexOf("../") != -1) {
LOG.warn("attempt to navigate up the directory hierarchy using a routerId");
return null;
} else {
String path = pathPattern.replace("{}", routerId);
return new File(path, "Graph.obj");
}
}
=======
>>>>>>>
<<<<<<<
@Override
public Collection<String> getGraphIds() {
return graphs.keySet();
}
=======
@Override
public void setResourceLoader(ResourceLoader rl) {
this.resourceLoader = rl;
}
>>>>>>>
@Override
public Collection<String> getGraphIds() {
return graphs.keySet();
}
@Override
public void setResourceLoader(ResourceLoader rl) {
this.resourceLoader = rl;
} |
<<<<<<<
import com.conveyal.geojson.GeometryDeserializer;
import com.conveyal.geojson.GeometrySerializer;
=======
import java.util.Date;
import java.util.HashSet;
import java.util.Optional;
>>>>>>>
import com.conveyal.geojson.GeometryDeserializer;
import com.conveyal.geojson.GeometrySerializer;
import java.util.Date;
import java.util.HashSet;
import java.util.Optional; |
<<<<<<<
} else if (sourceType.equals("uip-bike")) {
source = new UIPBikeRentalDataSource(apiKey);
=======
} else if (sourceType.equals("gbfs")) {
source = new GbfsBikeRentalDataSource();
>>>>>>>
} else if (sourceType.equals("uip-bike")) {
source = new UIPBikeRentalDataSource(apiKey);
} else if (sourceType.equals("gbfs")) {
source = new GbfsBikeRentalDataSource(); |