conflict_resolution
<<<<<<< import edu.illinois.library.cantaloupe.Application; ======= import edu.illinois.library.cantaloupe.Application; import org.apache.commons.configuration.Configuration; import org.restlet.data.CacheDirective; >>>>>>> import edu.illinois.library.cantaloupe.Application; import org.apache.commons.configuration.Configuration; import org.restlet.data.CacheDirective; <<<<<<< @Override protected void doInit() throws ResourceException { super.doInit(); // override the Server header this.getServerInfo().setAgent("Cantaloupe/" + Application.getVersion()); } ======= private static Logger logger = LoggerFactory. getLogger(AbstractResource.class); protected static List<CacheDirective> getCacheDirectives() { List<CacheDirective> directives = new ArrayList<>(); try { Configuration config = Application.getConfiguration(); String maxAge = config.getString("cache.client.max_age"); if (maxAge != null && maxAge.length() > 0) { directives.add(CacheDirective.maxAge(Integer.parseInt(maxAge))); } String sMaxAge = config.getString("cache.client.shared_max_age"); if (sMaxAge != null && sMaxAge.length() > 0) { directives.add(CacheDirective. sharedMaxAge(Integer.parseInt(sMaxAge))); } if (config.getBoolean("cache.client.public")) { directives.add(CacheDirective.publicInfo()); } else if (config.getBoolean("cache.client.private")) { directives.add(CacheDirective.privateInfo()); } if (config.getBoolean("cache.client.no_cache")) { directives.add(CacheDirective.noCache()); } if (config.getBoolean("cache.client.no_store")) { directives.add(CacheDirective.noStore()); } if (config.getBoolean("cache.client.must_revalidate")) { directives.add(CacheDirective.mustRevalidate()); } if (config.getBoolean("cache.client.proxy_revalidate")) { directives.add(CacheDirective.proxyMustRevalidate()); } if (config.getBoolean("cache.client.no_transform")) { directives.add(CacheDirective.noTransform()); } } catch (NoSuchElementException e) { logger.warn("Configuration file is missing one or more " + "cache.client.* keys. Cache-Control headers are disabled. " + "Original error: {}", e.getMessage()); } return directives; } >>>>>>> private static Logger logger = LoggerFactory. getLogger(AbstractResource.class); protected static List<CacheDirective> getCacheDirectives() { List<CacheDirective> directives = new ArrayList<>(); try { Configuration config = Application.getConfiguration(); String maxAge = config.getString("cache.client.max_age"); if (maxAge != null && maxAge.length() > 0) { directives.add(CacheDirective.maxAge(Integer.parseInt(maxAge))); } String sMaxAge = config.getString("cache.client.shared_max_age"); if (sMaxAge != null && sMaxAge.length() > 0) { directives.add(CacheDirective. sharedMaxAge(Integer.parseInt(sMaxAge))); } if (config.getBoolean("cache.client.public")) { directives.add(CacheDirective.publicInfo()); } else if (config.getBoolean("cache.client.private")) { directives.add(CacheDirective.privateInfo()); } if (config.getBoolean("cache.client.no_cache")) { directives.add(CacheDirective.noCache()); } if (config.getBoolean("cache.client.no_store")) { directives.add(CacheDirective.noStore()); } if (config.getBoolean("cache.client.must_revalidate")) { directives.add(CacheDirective.mustRevalidate()); } if (config.getBoolean("cache.client.proxy_revalidate")) { directives.add(CacheDirective.proxyMustRevalidate()); } if (config.getBoolean("cache.client.no_transform")) { directives.add(CacheDirective.noTransform()); } } catch (NoSuchElementException e) { logger.warn("Configuration file is missing one or more " + "cache.client.* keys. Cache-Control headers are disabled. " + "Original error: {}", e.getMessage()); } return directives; } @Override protected void doInit() throws ResourceException { super.doInit(); // override the Server header this.getServerInfo().setAgent("Cantaloupe/" + Application.getVersion()); }
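For context, a minimal sketch of how the merged getCacheDirectives() helper above would typically be consumed: a subclass attaches the directives to the response during initialization. This assumes Restlet 2.x's Response#setCacheDirectives(List<CacheDirective>); the ImageResource subclass is hypothetical and not part of this row.

public class ImageResource extends AbstractResource {
    @Override
    protected void doInit() throws ResourceException {
        super.doInit();
        // Translate the cache.client.* configuration keys into Cache-Control
        // directives on the response; an empty list leaves the header unset.
        getResponse().setCacheDirectives(getCacheDirectives());
    }
}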
<<<<<<< tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, filename); ======= tester.testContentDispositionHeaderSetToAttachmentWithFilenameInQuery(uri, expected); >>>>>>> tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, expected); <<<<<<< tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, filename); ======= tester.testContentDispositionHeaderSetToAttachmentWithFilenameInQuery(uri, expected); >>>>>>> tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, expected); <<<<<<< tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, filename); ======= tester.testContentDispositionHeaderSetToAttachmentWithFilenameInQuery(uri, expected); >>>>>>> tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, expected); <<<<<<< tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, filename); ======= tester.testContentDispositionHeaderSetToAttachmentWithFilenameInQuery(uri, expected); >>>>>>> tester.testContentDispositionHeaderSetToAttachmentWithFilename(uri, expected);
<<<<<<< validateRequestedArea(ops, sourceFormat, info); try { processor.validate(ops, fullSize); } catch (IllegalArgumentException e) { throw new IllegalClientArgumentException(e.getMessage(), e); } if (config.getBoolean(Key.IIIF_2_RESTRICT_TO_SIZES, false)) { final ImageInfo<String, Object> imageInfo = new ImageInfoFactory().newImageInfo( identifier, null, processor, info); final Dimension resultingSize = ops.getResultingSize(fullSize); boolean ok = false; @SuppressWarnings("unchecked") List<ImageInfo.Size> sizes = (List<ImageInfo.Size>) imageInfo.get("sizes"); for (ImageInfo.Size size : sizes) { if (size.width == resultingSize.width && size.height == resultingSize.height) { ok = true; break; ======= validateRequestedArea(ops, sourceFormat, info); processor.validate(ops, fullSize); if (config.getBoolean(Key.IIIF_2_RESTRICT_TO_SIZES, false)) { final ImageInfo<String, Object> imageInfo = new ImageInfoFactory().newImageInfo( identifier, null, processor, info); final Dimension resultingSize = ops.getResultingSize(fullSize); boolean ok = false; @SuppressWarnings("unchecked") List<ImageInfo.Size> sizes = (List<ImageInfo.Size>) imageInfo.get("sizes"); for (ImageInfo.Size size : sizes) { if (size.width == resultingSize.width && size.height == resultingSize.height) { ok = true; break; } } if (!ok) { throw new SizeRestrictedException(); >>>>>>> validateRequestedArea(ops, sourceFormat, info); try { processor.validate(ops, fullSize); } catch (IllegalArgumentException e) { throw new IllegalClientArgumentException(e.getMessage(), e); } if (config.getBoolean(Key.IIIF_2_RESTRICT_TO_SIZES, false)) { final ImageInfo<String, Object> imageInfo = new ImageInfoFactory().newImageInfo( identifier, null, processor, info); final Dimension resultingSize = ops.getResultingSize(fullSize); boolean ok = false; @SuppressWarnings("unchecked") List<ImageInfo.Size> sizes = (List<ImageInfo.Size>) imageInfo.get("sizes"); for (ImageInfo.Size size : sizes) { if (size.width == resultingSize.width && size.height == resultingSize.height) { ok = true; break; } } if (!ok) { throw new SizeRestrictedException();
<<<<<<< void testStringConstructor() { ======= public void testCopyConstructor() { Query query2 = new Query(instance); assertEquals("value1", query2.getFirstValue("key1")); assertEquals("value2", query2.getFirstValue("key2")); } @Test public void testConstructorWithEmptyString() { instance = new Query(""); assertTrue(instance.isEmpty()); } @Test public void testConstructorWithNonEmptyString() { >>>>>>> void testConstructorWithEmptyString() { instance = new Query(""); assertTrue(instance.isEmpty()); } @Test void testConstructorWithNonEmptyString() {
<<<<<<< public void testCacheHeadersWhenClientCachingIsEnabled() throws Exception { Configuration config = Configuration.getInstance(); ======= public void testCacheHeadersWhenClientCachingIsEnabledAndResponseIsCacheable() throws Exception { Configuration config = ConfigurationFactory.getInstance(); >>>>>>> public void testCacheHeadersWhenClientCachingIsEnabledAndResponseIsCacheable() throws Exception { Configuration config = Configuration.getInstance(); <<<<<<< Identifier identifier = new Identifier(IMAGE); ======= final String imagePath = "/" + IMAGE + "/full/full/0/default.jpg"; final OperationList ops = Parameters.fromUri(imagePath). toOperationList(); new InformationResource(). addNonEndpointOperations(ops, new Dimension(64, 56)); >>>>>>> Identifier identifier = new Identifier(IMAGE); <<<<<<< ======= getClientForUriPath(imagePath).get(); >>>>>>>
<<<<<<< import com.fasterxml.jackson.databind.ObjectMapper; import edu.illinois.library.cantaloupe.RestletApplication; ======= import edu.illinois.library.cantaloupe.WebApplication; import edu.illinois.library.cantaloupe.cache.Cache; >>>>>>> import edu.illinois.library.cantaloupe.RestletApplication; <<<<<<< import edu.illinois.library.cantaloupe.config.Configuration; import edu.illinois.library.cantaloupe.config.Key; ======= import edu.illinois.library.cantaloupe.config.ConfigurationFactory; >>>>>>> import edu.illinois.library.cantaloupe.config.Configuration; import edu.illinois.library.cantaloupe.config.Key; <<<<<<< import edu.illinois.library.cantaloupe.resource.AbstractResource; import edu.illinois.library.cantaloupe.resource.EndpointDisabledException; import edu.illinois.library.cantaloupe.resource.JSONRepresentation; import edu.illinois.library.cantaloupe.processor.ProcessorConnector; ======= import edu.illinois.library.cantaloupe.resource.SourceImageWrangler; >>>>>>> <<<<<<< import org.restlet.representation.EmptyRepresentation; ======= >>>>>>> <<<<<<< private static final org.slf4j.Logger logger = LoggerFactory. getLogger(AdminResource.class); ======= >>>>>>> <<<<<<< if (!Configuration.getInstance().getBoolean(Key.ADMIN_ENABLED, false)) { throw new EndpointDisabledException(); } ======= >>>>>>> <<<<<<< * @return JSON application configuration. <strong>This may contain * sensitive info and must be protected.</strong> * @throws Exception */ @Get("json") public Representation doGetAsJson() throws Exception { return new JSONRepresentation(configurationAsMap()); } /** * Deserializes submitted JSON data and updates the application * configuration instance with it. * * @param rep * @throws IOException */ @Post("json") public Representation doPost(Representation rep) throws IOException { final Configuration config = Configuration.getInstance(); final Map<?, ?> submittedConfig = new ObjectMapper().readValue( rep.getStream(), HashMap.class); // Copy configuration keys and values from the request JSON payload to // the application configuration. for (final Object key : submittedConfig.keySet()) { final Object value = submittedConfig.get(key); logger.debug("Setting {} = {}", key, value); config.setProperty((String) key, value); } config.save(); return new EmptyRepresentation(); } /** * @return Map representation of the application configuration. */ private Map<String,Object> configurationAsMap() { final Configuration config = Configuration.getInstance(); final Map<String,Object> configMap = new HashMap<>(); final Iterator<String> it = config.getKeys(); while (it.hasNext()) { final String key = it.next(); final Object value = config.getProperty(key); configMap.put(key, value); } return configMap; } /** ======= >>>>>>>
<<<<<<< protected static final String RESPONSE_CONTENT_DISPOSITION_QUERY_ARG = "response-content-disposition"; ======= public static final String AUTHORIZATION_DELEGATE_METHOD = "authorized?"; public static final String BASE_URI_CONFIG_KEY = "base_uri"; public static final String CLIENT_CACHE_ENABLED_CONFIG_KEY = "cache.client.enabled"; public static final String CLIENT_CACHE_MAX_AGE_CONFIG_KEY = "cache.client.max_age"; public static final String CLIENT_CACHE_MUST_REVALIDATE_CONFIG_KEY = "cache.client.must_revalidate"; public static final String CLIENT_CACHE_NO_CACHE_CONFIG_KEY = "cache.client.no_cache"; public static final String CLIENT_CACHE_NO_STORE_CONFIG_KEY = "cache.client.no_store"; public static final String CLIENT_CACHE_NO_TRANSFORM_CONFIG_KEY = "cache.client.no_transform"; public static final String CLIENT_CACHE_PRIVATE_CONFIG_KEY = "cache.client.private"; public static final String CLIENT_CACHE_PROXY_REVALIDATE_CONFIG_KEY = "cache.client.proxy_revalidate"; public static final String CLIENT_CACHE_PUBLIC_CONFIG_KEY = "cache.client.public"; public static final String CLIENT_CACHE_SHARED_MAX_AGE_CONFIG_KEY = "cache.client.shared_max_age"; public static final String CONTENT_DISPOSITION_CONFIG_KEY = "endpoint.iiif.content_disposition"; public static final String MAX_PIXELS_CONFIG_KEY = "max_pixels"; public static final String PUBLIC_IDENTIFIER_HEADER = "X-IIIF-ID"; public static final String SLASH_SUBSTITUTE_CONFIG_KEY = "slash_substitute"; >>>>>>> public static final String PUBLIC_IDENTIFIER_HEADER = "X-IIIF-ID"; protected static final String RESPONSE_CONTENT_DISPOSITION_QUERY_ARG = "response-content-disposition"; <<<<<<< /** * @return Identifier component of the URI, decoded and ready for use. */ protected Identifier getIdentifier() { final Map<String,Object> attrs = getRequest().getAttributes(); // Get the raw identifier from the URI. final String urlIdentifier = (String) attrs.get("identifier"); // Decode entities. final String decodedIdentifier = Reference.decode(urlIdentifier); // Decode slash substitutes. final String identifier = decodeSlashes(decodedIdentifier); LOGGER.debug("getIdentifier(): requested: {} / decoded: {} / " + "slashes substituted: {}", urlIdentifier, decodedIdentifier, identifier); return new Identifier(identifier); ======= /** * @return Value of the {@link #PUBLIC_IDENTIFIER_HEADER} header, if * available, or else the <code>identifier</code> URI path * component. */ protected String getPublicIdentifier() { final Map<String,Object> attrs = getRequest().getAttributes(); final String uriID = (String) attrs.get("identifier"); final String decodedID = Reference.decode(uriID); final String reSlashedID = decodeSlashes(decodedID); final String headerID = getRequest().getHeaders().getFirstValue( PUBLIC_IDENTIFIER_HEADER, true); logger.debug("Identifier requested: {} -> decoded: {} -> " + "slashes substituted: {} | {} header: {}", uriID, decodedID, reSlashedID, PUBLIC_IDENTIFIER_HEADER, headerID); return (headerID != null && !headerID.isEmpty()) ? headerID : reSlashedID; } protected ImageRepresentation getRepresentation(OperationList ops, Format sourceFormat, Info info, Disposition disposition, Processor proc) throws IOException, ProcessorException, CacheException { // Max allowed size is ignored when the processing is a no-op. final long maxAllowedSize = (ops.isNoOp(sourceFormat)) ? 0 : Configuration.getInstance().getLong(MAX_PIXELS_CONFIG_KEY, 0); final Dimension effectiveSize = ops.getResultingSize(info.getSize()); if (maxAllowedSize > 0 && effectiveSize.width * effectiveSize.height > maxAllowedSize) { throw new PayloadTooLargeException(); } return new ImageRepresentation(info, proc, ops, disposition, isBypassingCache()); >>>>>>> /** * @return Decoded identifier component of the URI. N.B.: This may not be * the identifier the user supplies or sees; for that, use * {@link #getPublicIdentifier()}. * @see #getPublicIdentifier() */ protected Identifier getIdentifier() { final Map<String,Object> attrs = getRequest().getAttributes(); // Get the raw identifier from the URI. final String urlIdentifier = (String) attrs.get("identifier"); // Decode entities. final String decodedIdentifier = Reference.decode(urlIdentifier); // Decode slash substitutes. final String identifier = decodeSlashes(decodedIdentifier); LOGGER.debug("getIdentifier(): requested: {} / decoded: {} / " + "slashes substituted: {}", urlIdentifier, decodedIdentifier, identifier); return new Identifier(identifier);
<<<<<<< void testSanitizeWithStrings() { assertEquals("", StringUtils.sanitize("dirt", "dirt")); assertEquals("y", StringUtils.sanitize("dirty", "dirt")); assertEquals("dirty", StringUtils.sanitize("dir1ty", "1")); ======= public void testSanitize1() { assertEquals("", StringUtils.sanitize("dirt", "dirt", "dirt")); assertEquals("y", StringUtils.sanitize("dirty", "dirt", "dirt")); assertEquals("dirty", StringUtils.sanitize("dir1ty", "1", "1")); >>>>>>> void testSanitize1() { assertEquals("", StringUtils.sanitize("dirt", "dirt", "dirt")); assertEquals("y", StringUtils.sanitize("dirty", "dirt", "dirt")); assertEquals("dirty", StringUtils.sanitize("dir1ty", "1", "1")); <<<<<<< void testSanitizeWithPatterns() { ======= public void testSanitize2() { >>>>>>> void testSanitize2() { <<<<<<< void testStripEndWithMatch() { ======= public void testSanitize3() { assertEquals("_", StringUtils.sanitize("dirt", "_", Pattern.compile("dirt"))); assertEquals("_y", StringUtils.sanitize("dirty", "_", Pattern.compile("dirt"))); assertEquals("dir_ty", StringUtils.sanitize("dir1ty", "_", Pattern.compile("1"))); // test injection assertEquals("caca_tsts", StringUtils.sanitize("cacacatststs", "_", Pattern.compile("cats"))); assertEquals("cadoca_tsgsts", StringUtils.sanitize("cadocadogstsgsts", "_", Pattern.compile("cats"), Pattern.compile("dogs"))); } @Test public void testStripEndWithMatch() { >>>>>>> void testSanitize3() { assertEquals("_", StringUtils.sanitize("dirt", "_", Pattern.compile("dirt"))); assertEquals("_y", StringUtils.sanitize("dirty", "_", Pattern.compile("dirt"))); assertEquals("dir_ty", StringUtils.sanitize("dir1ty", "_", Pattern.compile("1"))); // test injection assertEquals("caca_tsts", StringUtils.sanitize("cacacatststs", "_", Pattern.compile("cats"))); assertEquals("cadoca_tsgsts", StringUtils.sanitize("cadocadogstsgsts", "_", Pattern.compile("cats"), Pattern.compile("dogs"))); } @Test void testStripEndWithMatch() {
<<<<<<< ======= processor.validate(ops, fullSize); validateScale(info.getOrientationSize(), (Scale) ops.getFirst(Scale.class)); // Find out whether the processor supports the source format by // asking it whether it offers any output formats for it. if (!availableOutputFormats.isEmpty()) { if (!availableOutputFormats.contains(ops.getOutputFormat())) { Exception e = new UnsupportedOutputFormatException( processor, ops.getOutputFormat()); LOGGER.warn("{}: {}", e.getMessage(), getRequest().getReference()); throw e; } } else { throw new UnsupportedSourceFormatException(sourceFormat); } addHeaders(processor, ops.getOutputFormat(), disposition); new ImageRepresentation(info, processor, ops, isBypassingCache()) .write(getResponse().getOutputStream()); // Notify the health checker of a successful response -- after the // response has been written successfully, obviously. HealthChecker.addSourceProcessorPair(source, processor, ops); >>>>>>>
<<<<<<< private Component component; private boolean httpEnabled; private int httpPort; private boolean httpsEnabled; private String httpsKeyPassword; private String httpsKeyStorePassword; private String httpsKeyStorePath; private String httpsKeyStoreType; private int httpsPort; /** * Initializes the instance with defaults from the application * configuration. */ public WebServer() { final Configuration config = Application.getConfiguration(); if (config != null) { httpEnabled = config.getBoolean(HTTP_ENABLED_CONFIG_KEY, false); httpPort = config.getInteger(HTTP_PORT_CONFIG_KEY, 8182); httpsEnabled = config.getBoolean(HTTPS_ENABLED_CONFIG_KEY, false); httpsKeyPassword = config.getString(HTTPS_KEY_PASSWORD_CONFIG_KEY); httpsKeyStorePassword = config.getString(HTTPS_KEY_STORE_PASSWORD_CONFIG_KEY); httpsKeyStorePath = config.getString(HTTPS_KEY_STORE_PATH_CONFIG_KEY); httpsKeyStoreType = config.getString(HTTPS_KEY_STORE_TYPE_CONFIG_KEY); httpsPort = config.getInteger(HTTPS_PORT_CONFIG_KEY, 8183); } } public int getHttpPort() { return httpPort; } public String getHttpsKeyPassword() { return httpsKeyPassword; } public String getHttpsKeyStorePassword() { return httpsKeyStorePassword; } public String getHttpsKeyStorePath() { return httpsKeyStorePath; } public String getHttpsKeyStoreType() { return httpsKeyStoreType; } public int getHttpsPort() { return httpsPort; } public boolean isHttpEnabled() { return httpEnabled; } public boolean isHttpsEnabled() { return httpsEnabled; } public void setHttpEnabled(boolean enabled) { this.httpEnabled = enabled; } public void setHttpPort(int port) { this.httpPort = port; } public void setHttpsEnabled(boolean enabled) { this.httpsEnabled = enabled; } public void setHttpsKeyPassword(String password) { this.httpsKeyPassword = password; } public void setHttpsKeyStorePassword(String password) { this.httpsKeyStorePassword = password; } public void setHttpsKeyStorePath(String path) { this.httpsKeyStorePath = path; } public void setHttpsKeyStoreType(String type) { this.httpsKeyStoreType = type; } public void setHttpsPort(int port) { this.httpsPort = port; } ======= private static final int IDLE_TIMEOUT = 30000; private Server server; >>>>>>> private static final int IDLE_TIMEOUT = 30000; private boolean httpEnabled; private int httpPort; private boolean httpsEnabled; private String httpsKeyPassword; private String httpsKeyStorePassword; private String httpsKeyStorePath; private String httpsKeyStoreType; private int httpsPort; private Server server; /** * Initializes the instance with defaults from the application * configuration. */ public WebServer() { final Configuration config = Application.getConfiguration(); if (config != null) { httpEnabled = config.getBoolean(HTTP_ENABLED_CONFIG_KEY, false); httpPort = config.getInteger(HTTP_PORT_CONFIG_KEY, 8182); httpsEnabled = config.getBoolean(HTTPS_ENABLED_CONFIG_KEY, false); httpsKeyPassword = config.getString(HTTPS_KEY_PASSWORD_CONFIG_KEY); httpsKeyStorePassword = config.getString(HTTPS_KEY_STORE_PASSWORD_CONFIG_KEY); httpsKeyStorePath = config.getString(HTTPS_KEY_STORE_PATH_CONFIG_KEY); httpsKeyStoreType = config.getString(HTTPS_KEY_STORE_TYPE_CONFIG_KEY); httpsPort = config.getInteger(HTTPS_PORT_CONFIG_KEY, 8183); } } public int getHttpPort() { return httpPort; } public String getHttpsKeyPassword() { return httpsKeyPassword; } public String getHttpsKeyStorePassword() { return httpsKeyStorePassword; } public String getHttpsKeyStorePath() { return httpsKeyStorePath; } public String getHttpsKeyStoreType() { return httpsKeyStoreType; } public int getHttpsPort() { return httpsPort; } public boolean isHttpEnabled() { return httpEnabled; } public boolean isHttpsEnabled() { return httpsEnabled; } public void setHttpEnabled(boolean enabled) { this.httpEnabled = enabled; } public void setHttpPort(int port) { this.httpPort = port; } public void setHttpsEnabled(boolean enabled) { this.httpsEnabled = enabled; } public void setHttpsKeyPassword(String password) { this.httpsKeyPassword = password; } public void setHttpsKeyStorePassword(String password) { this.httpsKeyStorePassword = password; } public void setHttpsKeyStorePath(String path) { this.httpsKeyStorePath = path; } public void setHttpsKeyStoreType(String type) { this.httpsKeyStoreType = type; } public void setHttpsPort(int port) { this.httpsPort = port; }
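A brief usage sketch of the merged WebServer: the constructor seeds its fields from the application configuration, and the setters override those defaults before startup. The start() call is an assumption here; it is not shown in this row.

public static void main(String[] args) throws Exception {
    WebServer server = new WebServer(); // defaults from HTTP_*/HTTPS_* config keys
    server.setHttpEnabled(true);        // override the configured values
    server.setHttpPort(8182);
    server.start();                     // assumed API; not part of this row
}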
<<<<<<< lock.writeLock().lock(); LOGGER.info("put(): caching: {}", identifier); ======= >>>>>>>
<<<<<<< ======= assertEquals(0, red); assertEquals(0, green); assertEquals(0, blue); // Test the font color pixel = overlaidImage.getRGB(9, 8); alpha = (pixel >> 24) & 0xff; red = (pixel >> 16) & 0xff; green = (pixel >> 8) & 0xff; blue = (pixel) & 0xff; assertEquals(255, alpha); >>>>>>>
<<<<<<< void testGETNotFound() { ======= public void testGETForbidden() { URI uri = getHTTPURI("/forbidden/info.json"); tester.testForbidden(uri); } @Test public void testGETNotFound() { >>>>>>> void testGETForbidden() { URI uri = getHTTPURI("/forbidden/info.json"); tester.testForbidden(uri); } @Test void testGETNotFound() {
<<<<<<< ======= try { final ReductionFactor reductionFactor = new ReductionFactor(); // If we are normalizing, we need to read the entire image region. final boolean normalize = (opList.getFirst(Normalize.class) != null); final ProcessBuilder pb = getProcessBuilder( opList, imageInfo.getSize(), reductionFactor, normalize); LOGGER.debug("Invoking {}", String.join(" ", pb.command())); final Process process = pb.start(); >>>>>>>
<<<<<<< import java.util.Arrays; ======= import edu.illinois.library.cantaloupe.util.StringUtils; >>>>>>> import edu.illinois.library.cantaloupe.util.StringUtils; import java.util.Arrays;
<<<<<<< void testGETNotFound() { ======= public void testGETForbidden() { URI uri = getHTTPURI("/forbidden/full/full/0/color.jpg"); tester.testForbidden(uri); } @Test public void testGETNotFound() { >>>>>>> void testGETForbidden() { URI uri = getHTTPURI("/forbidden/full/full/0/color.jpg"); tester.testForbidden(uri); } @Test void testGETNotFound() {
<<<<<<< private InputStream docInputStream; ======= private Dimension imageSize; >>>>>>> <<<<<<< ======= private void loadDocument() throws IOException { if (doc == null) { final Stopwatch watch = new Stopwatch(); if (sourceFile != null) { doc = PDDocument.load(sourceFile.toFile()); } else { try (InputStream is = streamSource.newInputStream()) { doc = PDDocument.load(is); } } // Disable the document's cache of PDImageXObjects // See: https://pdfbox.apache.org/2.0/faq.html#outofmemoryerror doc.setResourceCache(new DefaultResourceCache() { @Override public void put(COSObject indirect, PDXObject xobject) { // no-op } }); LOGGER.debug("Loaded document in {} msec", watch.timeElapsed()); } } >>>>>>>
<<<<<<< import java.util.EnumSet; ======= import java.util.Arrays; >>>>>>> <<<<<<< ======= /** * @return Whether metadata can be skipped when reading. */ private boolean canIgnoreMetadata() { final Configuration config = Configuration.getInstance(); final boolean preserveMetadata = config.getBoolean( Key.PROCESSOR_PRESERVE_METADATA, false); final boolean respectOrientation = config.getBoolean( Key.PROCESSOR_RESPECT_ORIENTATION, false); return (!preserveMetadata && !respectOrientation); } abstract public boolean canSeek(); >>>>>>> abstract public boolean canSeek();
<<<<<<< instance.setIdentifier(new Identifier(OBJECT_KEY)); ======= instance.setIdentifier(new Identifier(OBJECT_KEY_WITH_CONTENT_TYPE_AND_RECOGNIZED_EXTENSION)); instance.setContext(new RequestContext()); >>>>>>> instance.setIdentifier(new Identifier(OBJECT_KEY_WITH_CONTENT_TYPE_AND_RECOGNIZED_EXTENSION)); <<<<<<< Identifier identifier = new Identifier("bogus"); RequestContext context = new RequestContext(); context.setIdentifier(identifier); DelegateProxyService service = DelegateProxyService.getInstance(); DelegateProxy proxy = service.newDelegateProxy(context); instance.setDelegateProxy(proxy); instance.setIdentifier(identifier); ======= instance.setIdentifier(new Identifier("bogus")); >>>>>>> Identifier identifier = new Identifier("bogus"); RequestContext context = new RequestContext(); context.setIdentifier(identifier); DelegateProxyService service = DelegateProxyService.getInstance(); DelegateProxy proxy = service.newDelegateProxy(context); instance.setDelegateProxy(proxy); instance.setIdentifier(identifier);
<<<<<<< import edu.illinois.library.cantaloupe.processor.ProcessorFactory; import edu.illinois.library.cantaloupe.image.watermark.WatermarkService; ======= import edu.illinois.library.cantaloupe.processor.StreamProcessor; >>>>>>> import edu.illinois.library.cantaloupe.image.watermark.WatermarkService; import edu.illinois.library.cantaloupe.processor.StreamProcessor; <<<<<<< // If the operations are effectively a no-op AND watermarking is // disabled, the source image can be streamed right through. if (this.ops.isNoOp(this.sourceFormat) && !WatermarkService.isEnabled()) { if (this.file != null) { IOUtils.copy(new FileInputStream(this.file), outputStream); ======= // If the operations are effectively a no-op, the source image can // be streamed directly. if (this.ops.isNoOp(processor.getSourceFormat())) { if (processor instanceof FileProcessor) { final File sourceFile = ((FileProcessor) processor).getSourceFile(); final InputStream inputStream = new FileInputStream(sourceFile); IOUtils.copy(inputStream, outputStream); >>>>>>> // If the operations are effectively a no-op AND watermarking is // disabled, the source image can be streamed right through. if (ops.isNoOp(processor.getSourceFormat()) && !WatermarkService.isEnabled()) { if (processor instanceof FileProcessor) { final File sourceFile = ((FileProcessor) processor).getSourceFile(); final InputStream inputStream = new FileInputStream(sourceFile); IOUtils.copy(inputStream, outputStream);
<<<<<<< import static org.junit.jupiter.api.Assertions.*; ======= import java.net.URI; import static org.junit.Assert.*; >>>>>>> import java.net.URI; import static org.junit.jupiter.api.Assertions.*; <<<<<<< void testGETWithEndpointEnabled() throws Exception { ======= public void testGETWithEndpointDisabled() throws Exception { Configuration config = Configuration.getInstance(); config.setProperty(Key.API_ENABLED, false); try { client.send(); fail("Expected exception"); } catch (ResourceException e) { assertEquals(403, e.getStatusCode()); } } /** * The processing pipeline isn't exercised until an image has been * successfully returned from an image endpoint. */ @Test public void testGETWithNoPriorImageRequest() throws Exception { >>>>>>> void testGETWithEndpointDisabled() throws Exception { Configuration config = Configuration.getInstance(); config.setProperty(Key.API_ENABLED, false); try { client.send(); fail("Expected exception"); } catch (ResourceException e) { assertEquals(403, e.getStatusCode()); } } /** * The processing pipeline isn't exercised until an image has been * successfully returned from an image endpoint. */ @Test void testGETWithNoPriorImageRequest() throws Exception { <<<<<<< void testGETWithEndpointDisabled() throws Exception { ======= public void testGETWithPriorImageRequest() throws Exception { >>>>>>> void testGETWithPriorImageRequest() throws Exception {
<<<<<<< public void mainWithPurgeIdentifierArgExits() throws Exception { ======= @Ignore // this test is too timing-sensitive and command-line purging is deprecated in 3.4 anyway public void testMainWithPurgeExpiredCacheArg() throws Exception { >>>>>>> @Ignore // this test is too timing-sensitive and command-line purging is deprecated anyway public void mainWithPurgeIdentifierArgExits() throws Exception {
<<<<<<< ======= /** * @see #getPublicRootReference() */ protected Reference getPublicReference() { final Request request = getRequest(); return getPublicReference(request.getRootRef(), request.getResourceRef(), request.getHeaders()); } /** * @see #getPublicReference() */ protected Reference getPublicRootReference() { final Request request = getRequest(); return getPublicReference(request.getRootRef(), request.getRootRef(), request.getHeaders()); } protected ImageRepresentation getRepresentation(OperationList ops, Format sourceFormat, Info info, Disposition disposition, Processor proc) throws IOException, ProcessorException, CacheException { // Max allowed size is ignored when the processing is a no-op. final long maxAllowedSize = (ops.isNoOp(sourceFormat)) ? 0 : Configuration.getInstance().getLong(MAX_PIXELS_CONFIG_KEY, 0); final Dimension effectiveSize = ops.getResultingSize(info.getSize()); if (maxAllowedSize > 0 && effectiveSize.width * effectiveSize.height > maxAllowedSize) { throw new PayloadTooLargeException(); } return new ImageRepresentation(info, proc, ops, disposition, isBypassingCache()); } >>>>>>> /** * @see #getPublicRootReference() */ protected Reference getPublicReference() { final Request request = getRequest(); return getPublicReference(request.getRootRef(), request.getResourceRef(), request.getHeaders()); } /** * @see #getPublicReference() */ protected Reference getPublicRootReference() { final Request request = getRequest(); return getPublicReference(request.getRootRef(), request.getRootRef(), request.getHeaders()); }
<<<<<<< return new ConcurrentFileOutputStream<>(tempFile, ======= // Create the containing directory. This may throw a // FileAlreadyExistsException for concurrent invocations with the // same argument. Files.createDirectories(tempFile.getParentFile().toPath()); // ops will be removed from this set when the returned output // stream is closed. derivativeImagesBeingWritten.add(ops); return new ConcurrentFileOutputStream(tempFile, >>>>>>> // Create the containing directory. This may throw a // FileAlreadyExistsException for concurrent invocations with the // same argument. Files.createDirectories(tempFile.getParentFile().toPath()); // ops will be removed from this set when the returned output // stream is closed. derivativeImagesBeingWritten.add(ops); return new ConcurrentFileOutputStream<>(tempFile, <<<<<<< // identifier will be removed from this set when the non-null output // stream returned by this method is closed. sourceImagesBeingWritten.add(identifier); LOGGER.info("newSourceImageOutputStream(Identifier): miss; caching {}", ======= logger.info("newSourceImageOutputStream(Identifier): miss; caching {}", >>>>>>> // identifier will be removed from this set when the non-null output // stream returned by this method is closed. sourceImagesBeingWritten.add(identifier); LOGGER.info("newSourceImageOutputStream(Identifier): miss; caching {}", <<<<<<< final File tempFile = sourceImageTempFile(identifier); if (!tempFile.getParentFile().isDirectory()) { if (!tempFile.getParentFile().mkdirs()) { LOGGER.info("newSourceImageOutputStream(Identifier): can't create {}", tempFile.getParentFile()); // We could threw a CacheException here, but it is probably // not necessary as we are likely to get here often during // concurrent invocations. return new NullOutputStream(); } } final File destFile = sourceImageFile(identifier); return new ConcurrentFileOutputStream<>( tempFile, destFile, sourceImagesBeingWritten, identifier); ======= // Create the containing directory. This may throw a // FileAlreadyExistsException for concurrent invocations with the // same argument. Files.createDirectories(tempFile.getParentFile().toPath()); // identifier will be removed from this set when the non-null output // stream returned by this method is closed. sourceImagesBeingWritten.add(identifier); return new ConcurrentFileOutputStream( tempFile, sourceImageFile(identifier), sourceImagesBeingWritten, identifier); } catch (FileAlreadyExistsException e) { // The image either already exists in its complete form, or is // being written by another thread/process. Either way, there is no // need to write over it. logger.debug("newSourceImageOutputStream(OperationList): " + "{} already exists; returning a {}", tempFile.getParentFile(), NullOutputStream.class.getSimpleName()); return new NullOutputStream(); >>>>>>> // Create the containing directory. This may throw a // FileAlreadyExistsException for concurrent invocations with the // same argument. Files.createDirectories(tempFile.getParentFile().toPath()); // identifier will be removed from this set when the non-null output // stream returned by this method is closed. sourceImagesBeingWritten.add(identifier); return new ConcurrentFileOutputStream<>( tempFile, sourceImageFile(identifier), sourceImagesBeingWritten, identifier); } catch (FileAlreadyExistsException e) { // The image either already exists in its complete form, or is // being written by another thread/process. Either way, there is no // need to write over it. LOGGER.debug("newSourceImageOutputStream(OperationList): " + "{} already exists; returning a {}", tempFile.getParentFile(), NullOutputStream.class.getSimpleName()); return new NullOutputStream(); <<<<<<< // When this method is called concurrently for the same info, one // invocation will complete successfully and the other(s) will // produce this, which is fine. LOGGER.debug("put(): {}", e.getMessage()); ======= // When this method runs concurrently with an equal Identifier // argument, all of the other invocations of Files.move() will // throw this, which is fine. logger.debug("put(): failed to move file: {}", e.getMessage()); >>>>>>> // When this method runs concurrently with an equal Identifier // argument, all of the other invocations of Files.move() will // throw this, which is fine. LOGGER.debug("put(): failed to move file: {}", e.getMessage());
<<<<<<< import com.fasterxml.jackson.databind.ObjectMapper; import edu.illinois.library.cantaloupe.RestletApplication; ======= import edu.illinois.library.cantaloupe.WebApplication; >>>>>>> import edu.illinois.library.cantaloupe.RestletApplication; <<<<<<< public void testDoGetAsJson() { Configuration.getInstance().setProperty("test", "cats"); ClientResource client = getClientForUriPath(RestletApplication.ADMIN_PATH, USERNAME, SECRET); client.get(MediaType.APPLICATION_JSON); assertTrue(client.getResponse().getEntityAsText(). contains("\"test\":\"cats\"")); } @Test public void testDoPost() throws Exception { Map<String,Object> entityMap = new HashMap<>(); entityMap.put("test", "cats"); String entityStr = new ObjectMapper().writer().writeValueAsString(entityMap); Representation rep = new StringRepresentation(entityStr, MediaType.APPLICATION_JSON); ClientResource client = getClientForUriPath( RestletApplication.ADMIN_PATH, USERNAME, SECRET); client.post(rep); assertEquals("cats", Configuration.getInstance().getString("test")); } @Test public void testDoPostSavesFile() { // TODO: write this } @Test ======= >>>>>>> <<<<<<< ClientResource client = getClientForUriPath(RestletApplication.ADMIN_PATH, USERNAME, SECRET); ======= ClientResource client = getClientForUriPath( WebApplication.ADMIN_PATH, USERNAME, SECRET); >>>>>>> ClientResource client = getClientForUriPath( RestletApplication.ADMIN_PATH, USERNAME, SECRET);
<<<<<<< /** * A global resolver is specified using the {@link Key#RESOLVER_STATIC} * configuration key. */ STATIC, /** * A resolver specific to the request is acquired from the {@link * DelegateMethod#RESOLVER} delegate method. */ DELEGATE_SCRIPT } ======= private static final Logger LOGGER = LoggerFactory.getLogger(ResolverFactory.class); >>>>>>> /** * A global resolver is specified using the {@link Key#RESOLVER_STATIC} * configuration key. */ STATIC, /** * A resolver specific to the request is acquired from the {@link * DelegateMethod#RESOLVER} delegate method. */ DELEGATE_SCRIPT } <<<<<<< * @throws IllegalArgumentException if the {@literal proxy} argument is * {@literal null} while using {@link * SelectionStrategy#DELEGATE_SCRIPT}. ======= >>>>>>> * @throws IllegalArgumentException if the {@literal proxy} argument is * {@literal null} while using {@link * SelectionStrategy#DELEGATE_SCRIPT}. <<<<<<< DelegateProxy proxy) throws Exception { final Configuration config = Configuration.getInstance(); switch (getSelectionStrategy()) { case DELEGATE_SCRIPT: if (proxy == null) { throw new IllegalArgumentException("The " + DelegateProxy.class.getSimpleName() + " argument must be non-null when using " + getSelectionStrategy() + "."); } Resolver resolver = newDynamicResolver(identifier, proxy); LOGGER.info("{}() returned a {} for {}", DelegateMethod.RESOLVER, resolver.getClass().getSimpleName(), identifier); return resolver; default: final String resolverName = config.getString(Key.RESOLVER_STATIC); if (resolverName != null) { return newResolver(resolverName, identifier, proxy); } else { throw new ConfigurationException(Key.RESOLVER_STATIC + " is not set to a valid resolver."); } ======= RequestContext context) throws Exception { switch (getSelectionStrategy()) { case DELEGATE_SCRIPT: Resolver resolver = newDynamicResolver(identifier, context); LOGGER.info("{}() returned a {} for {}", RESOLVER_CHOOSER_DELEGATE_METHOD, resolver.getClass().getSimpleName(), identifier); return resolver; default: final Configuration config = Configuration.getInstance(); final String resolverName = config.getString(Key.RESOLVER_STATIC); if (resolverName != null) { return newResolver(resolverName, identifier, context); } else { throw new ConfigurationException(Key.RESOLVER_STATIC + " is not set to a valid resolver."); } >>>>>>> DelegateProxy proxy) throws Exception { switch (getSelectionStrategy()) { case DELEGATE_SCRIPT: if (proxy == null) { throw new IllegalArgumentException("The " + DelegateProxy.class.getSimpleName() + " argument must be non-null when using " + getSelectionStrategy() + "."); } Resolver resolver = newDynamicResolver(identifier, proxy); LOGGER.info("{}() returned a {} for {}", DelegateMethod.RESOLVER, resolver.getClass().getSimpleName(), identifier); return resolver; default: final Configuration config = Configuration.getInstance(); final String resolverName = config.getString(Key.RESOLVER_STATIC); if (resolverName != null) { return newResolver(resolverName, identifier, proxy); } else { throw new ConfigurationException(Key.RESOLVER_STATIC + " is not set to a valid resolver."); } <<<<<<< DelegateProxy proxy) throws Exception { Class<?> class_ = Class.forName( ResolverFactory.class.getPackage().getName() + "." + name); ======= RequestContext context) throws Exception { // If the name contains a dot, assume it's a full class name, // including package. Otherwise, assume it's a simple class name in // this package. String fullName = name.contains(".") ? name : ResolverFactory.class.getPackage().getName() + "." + name; Class<?> class_ = Class.forName(fullName); >>>>>>> DelegateProxy proxy) throws Exception { // If the name contains a dot, assume it's a full class name, // including package. Otherwise, assume it's a simple class name in // this package. String fullName = name.contains(".") ? name : ResolverFactory.class.getPackage().getName() + "." + name; Class<?> class_ = Class.forName(fullName);
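To make the merged name-resolution rule concrete: a name containing a dot is treated as fully qualified, while anything else is qualified with ResolverFactory's own package before loading. A hedged illustration, with the dotted class name hypothetical and the call signature taken from this row:

// Simple name: qualified with ResolverFactory's package before Class.forName().
Resolver r1 = newResolver("FilesystemResolver", identifier, proxy);
// Dotted name: passed to Class.forName() as-is.
Resolver r2 = newResolver("com.example.CustomResolver", identifier, proxy);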
<<<<<<< ======= * 4.7. "When the client requests an image, the server may add a link * header to the response that indicates the canonical URI for that * request." */ @Test public void testCanonicalUriLinkHeader() throws Exception { final String path = "/" + IMAGE + "/pct:50,50,50,50/,50/0/default.jpg"; final URI uri = getHTTPURI(path); final String uriStr = uri.toString(); final String expectedURI = uriStr.substring(0, uriStr.indexOf(IMAGE.toString()) + IMAGE.toString().length()) + "/32,28,32,28/57,/0/default.jpg"; client = newClient(path); Response response = client.send(); assertEquals("<" + expectedURI + ">;rel=\"canonical\"", response.getHeaders().getFirstValue("Link")); } /** >>>>>>> * 4.7. "When the client requests an image, the server may add a link * header to the response that indicates the canonical URI for that * request." */ @Test void testCanonicalUriLinkHeader() throws Exception { final String path = "/" + IMAGE + "/pct:50,50,50,50/,50/0/default.jpg"; final URI uri = getHTTPURI(path); final String uriStr = uri.toString(); final String expectedURI = uriStr.substring(0, uriStr.indexOf(IMAGE.toString()) + IMAGE.toString().length()) + "/32,28,32,28/57,/0/default.jpg"; client = newClient(path); Response response = client.send(); assertEquals("<" + expectedURI + ">;rel=\"canonical\"", response.getHeaders().getFirstValue("Link")); } /**
<<<<<<< ======= public static final String CONTENT_DISPOSITION_CONFIG_KEY = "endpoint.iiif.content_disposition"; public static final String RESTRICT_TO_SIZES_CONFIG_KEY = "endpoint.iiif.2.restrict_to_sizes"; >>>>>>> <<<<<<< final Identifier identifier = getIdentifier(); // Assemble the URI parameters into a Parameters object. ======= final String urlIdentifier = (String) attrs.get("identifier"); final String decodedIdentifier = Reference.decode(urlIdentifier); final String reSlashedIdentifier = decodeSlashes(decodedIdentifier); final Identifier identifier = new Identifier(reSlashedIdentifier); // Assemble the URI parameters into a Parameters object >>>>>>> final Identifier identifier = getIdentifier(); // Assemble the URI parameters into a Parameters object. <<<<<<< getPublicRootRef(getRequest().getRootRef(), headers), RestletApplication.IIIF_2_PATH, paramsStr)); ======= getPublicRootRef(getRequest().getRootRef(), requestHeaders), WebApplication.IIIF_2_PATH, paramsStr)); >>>>>>> getPublicRootRef(getRequest().getRootRef(), requestHeaders), RestletApplication.IIIF_2_PATH, paramsStr));
<<<<<<< private static InitializationException initializationException; private static boolean isInitialized = false; /** ImageMagick 7 uses a `magick` command. Earlier versions use `convert` and `identify`. */ private static AtomicBoolean isUsingVersion7; ======= // ImageMagick 7 uses a `magick` command. Earlier versions use `convert` // and `identify`. IM7 may provide aliases for these. private static final AtomicBoolean hasCheckedVersion = new AtomicBoolean(false); private static boolean isUsingVersion7 = false; >>>>>>> // ImageMagick 7 uses a `magick` command. Earlier versions use `convert` // and `identify`. IM7 may provide aliases for these. private static final AtomicBoolean hasCheckedVersion = new AtomicBoolean(false); private static InitializationException initializationException; private static boolean isInitialized = false; private static boolean isUsingVersion7 = false; <<<<<<< static synchronized boolean isUsingVersion7() { if (isUsingVersion7 == null) { final ProcessBuilder pb = new ProcessBuilder(); final List<String> command = new ArrayList<>(); command.add(getPath("magick")); pb.command(command); try { isUsingVersion7 = new AtomicBoolean(false); final String commandString = String.join(" ", pb.command()); LOGGER.debug("isUsingVersion7(): trying to invoke {}", commandString); final Process process = pb.start(); process.waitFor(); LOGGER.info("isUsingVersion7(): found magick command; " + "assuming ImageMagick 7+"); isUsingVersion7.set(true); } catch (Exception e) { LOGGER.info("isUsingVersion7(): couldn't find magick " + "command; assuming ImageMagick <7"); LOGGER.warn("ImageMagick <7 support is DEPRECATED. " + "Please upgrade to version 7."); isUsingVersion7.set(false); ======= private static boolean isUsingVersion7() { if (!hasCheckedVersion.get()) { synchronized (ImageMagickProcessor.class) { final ProcessBuilder pb = new ProcessBuilder(); final List<String> command = new ArrayList<>(); command.add(getPath("magick")); pb.command(command); try { isUsingVersion7 = false; final String commandString = StringUtils.join(pb.command(), " "); logger.debug("isUsingVersion7(): trying to invoke {}", commandString); final Process process = pb.start(); process.waitFor(); logger.info("isUsingVersion7(): found magick command; " + "assuming ImageMagick 7+"); isUsingVersion7 = true; } catch (Exception e) { logger.info("isUsingVersion7(): couldn't find magick " + "command; assuming ImageMagick <7"); isUsingVersion7 = false; } >>>>>>> static boolean isUsingVersion7() { if (!hasCheckedVersion.get()) { synchronized (ImageMagickProcessor.class) { final ProcessBuilder pb = new ProcessBuilder(); final List<String> command = new ArrayList<>(); command.add(getPath("magick")); pb.command(command); try { isUsingVersion7 = false; final String commandString = StringUtils.join(pb.command(), " "); LOGGER.debug("isUsingVersion7(): trying to invoke {}", commandString); final Process process = pb.start(); process.waitFor(); LOGGER.info("isUsingVersion7(): found magick command; " + "assuming ImageMagick 7+"); isUsingVersion7 = true; } catch (Exception e) { LOGGER.info("isUsingVersion7(): couldn't find magick " + "command; assuming ImageMagick <7"); isUsingVersion7 = false; }
<<<<<<< log.errorf("%s times attempts to get a wx access_token , but all failed!", retryTimes); throw e; ======= throw Lang.wrapThrow(e); >>>>>>> log.errorf("%s times attempts to get a wx access_token , but all failed!", retryTimes); throw Lang.wrapThrow(e);
<<<<<<< import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; ======= >>>>>>> import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; <<<<<<< import com.easyfitness.DAO.Profile; import com.easyfitness.MainActivity; import com.easyfitness.R; ======= >>>>>>> <<<<<<< ======= import com.easyfitness.DAO.Profil; import com.easyfitness.R; >>>>>>> import com.easyfitness.DAO.Profil; import com.easyfitness.R; <<<<<<< machineDelete = (ImageButton) view.findViewById(R.id.action_machine_delete); machineSave = (ImageButton) view.findViewById(R.id.action_machine_save); machineSave.setVisibility(View.GONE); // Hide Save button by default ======= machineAction = (FloatingActionButton) view.findViewById(R.id.actionCamera); >>>>>>> machineDelete = (ImageButton) view.findViewById(R.id.action_machine_delete); machineSave = (ImageButton) view.findViewById(R.id.action_machine_save); machineSave.setVisibility(View.GONE); // Hide Save button by default machineAction = (FloatingActionButton) view.findViewById(R.id.actionCamera);
<<<<<<< project.getNodeRoot().findAll(mockActionContext("branch=" + initialBranch.getName()), new PagingParametersImpl(1, 10000L, "name", SortOrder.ASCENDING)).forEach(node -> nodes.add(node)); assertThat(nodes).as("Nodes in initial branch").usingElementComparatorOnFields("uuid").doesNotContain(subFolder, subSubFolder); ======= project.getNodeRoot().findAll(mockActionContext("release=" + initialBranch.getName()), new PagingParametersImpl(1, 10000, "name", SortOrder.ASCENDING)).forEach(node -> nodes.add(node)); assertThat(nodes).as("Nodes in initial release").usingElementComparatorOnFields("uuid").doesNotContain(subFolder, subSubFolder); >>>>>>> project.getNodeRoot().findAll(mockActionContext("release=" + initialBranch.getName()), new PagingParametersImpl(1, 10000, "name", SortOrder.ASCENDING)).forEach(node -> nodes.add(node)); assertThat(nodes).as("Nodes in initial branch").usingElementComparatorOnFields("uuid").doesNotContain(subFolder, subSubFolder); <<<<<<< project.getNodeRoot().findAll(mockActionContext("version=draft"), new PagingParametersImpl(1, 10000L, null, SortOrder.UNSORTED)) .forEach(node -> nodeUuids.add(node.getUuid())); ======= project.getNodeRoot().findAll(mockActionContext("version=draft"), new PagingParametersImpl(1, 10000, null, SortOrder.UNSORTED)) .forEach(node -> nodeUuids.add(node.getUuid())); >>>>>>> project.getNodeRoot().findAll(mockActionContext("version=draft"), new PagingParametersImpl(1, 10000, null, SortOrder.UNSORTED)) .forEach(node -> nodeUuids.add(node.getUuid())); <<<<<<< project.getNodeRoot().findAll(mockActionContext("version=published"), new PagingParametersImpl(1, 10000L, null, SortOrder.UNSORTED)) .forEach(node -> nodeUuids.add(node.getUuid())); ======= project.getNodeRoot().findAll(mockActionContext("version=published"), new PagingParametersImpl(1, 10000, null, SortOrder.UNSORTED)) .forEach(node -> nodeUuids.add(node.getUuid())); >>>>>>> project.getNodeRoot().findAll(mockActionContext("version=published"), new PagingParametersImpl(1, 10000, null, SortOrder.UNSORTED)) .forEach(node -> nodeUuids.add(node.getUuid())); <<<<<<< project.getNodeRoot().findAll(mockActionContext("version=draft"), new PagingParametersImpl(1, 10000L, null, UNSORTED)).forEach( node -> nodeUuids.add(node.getUuid())); ======= project.getNodeRoot().findAll(mockActionContext("version=draft"), new PagingParametersImpl(1, 10000, null, UNSORTED)).forEach( node -> nodeUuids.add(node.getUuid())); >>>>>>> project.getNodeRoot().findAll(mockActionContext("version=draft"), new PagingParametersImpl(1, 10000, null, UNSORTED)).forEach( node -> nodeUuids.add(node.getUuid())); <<<<<<< project.getNodeRoot().findAll(mockActionContext("version=published"), new PagingParametersImpl(1, 10000L, null, UNSORTED)).forEach( node -> nodeUuids.add(node.getUuid())); ======= project.getNodeRoot().findAll(mockActionContext("version=published"), new PagingParametersImpl(1, 10000, null, UNSORTED)).forEach( node -> nodeUuids.add(node.getUuid())); >>>>>>> project.getNodeRoot().findAll(mockActionContext("version=published"), new PagingParametersImpl(1, 10000, null, UNSORTED)).forEach( node -> nodeUuids.add(node.getUuid()));
<<<<<<< Objects.requireNonNull(tagFamilyUuid, "tagFamilyUuid must not be null"); return invokeRequest(GET, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags" + getQuery(parameters), TagListResponse.class); ======= Objects.requireNonNull(uuid, "uuid must not be null"); return handleRequest(GET, "/" + projectName + "/tagFamilies/" + uuid + "/tags" + getQuery(parameters), TagListResponse.class); >>>>>>> Objects.requireNonNull(tagFamilyUuid, "tagFamilyUuid must not be null"); return handleRequest(GET, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags" + getQuery(parameters), TagListResponse.class); <<<<<<< return invokeRequest(POST, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags", TagResponse.class, tagCreateRequest); ======= return handleRequest(POST, "/" + projectName + "/tags", TagResponse.class, tagCreateRequest); >>>>>>> return handleRequest(POST, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags", TagResponse.class, tagCreateRequest); <<<<<<< return invokeRequest(GET, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + uuid + getQuery(parameters), TagResponse.class); ======= return handleRequest(GET, "/" + projectName + "/tags/" + uuid + getQuery(parameters), TagResponse.class); >>>>>>> return handleRequest(GET, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + uuid + getQuery(parameters), TagResponse.class); <<<<<<< return invokeRequest(PUT, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + uuid, TagResponse.class, tagUpdateRequest); ======= return handleRequest(PUT, "/" + projectName + "/tags/" + uuid, TagResponse.class, tagUpdateRequest); >>>>>>> return handleRequest(PUT, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + uuid, TagResponse.class, tagUpdateRequest); <<<<<<< return invokeRequest(DELETE, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + uuid, GenericMessageResponse.class); ======= return handleRequest(DELETE, "/" + projectName + "/tags/" + uuid, GenericMessageResponse.class); >>>>>>> return handleRequest(DELETE, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + uuid, GenericMessageResponse.class); <<<<<<< return invokeRequest(GET, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + tagUuid + "/nodes" + getQuery(parameters), NodeListResponse.class); ======= return handleRequest(GET, "/" + projectName + "/tags/" + tagUuid + "/nodes" + getQuery(parameters), NodeListResponse.class); >>>>>>> return handleRequest(GET, "/" + projectName + "/tagFamilies/" + tagFamilyUuid + "/tags/" + tagUuid + "/nodes" + getQuery(parameters), NodeListResponse.class); <<<<<<< String requestUri = BASEURI + "/" + projectName + "/webroot/" + path + getQuery(parameters); MeshResponseHandler<Object> handler = new MeshResponseHandler<>(Object.class, this, HttpMethod.GET, requestUri); ======= try { path = URLEncoder.encode(path, "UTF-8"); } catch (UnsupportedEncodingException e) { return Future.failedFuture(e); } String requestUri = BASEURI +"/" + projectName + "/webroot/" + path + getQuery(parameters); MeshResponseHandler<Object> handler = new MeshResponseHandler<>(Object.class, HttpMethod.GET, requestUri, getClientSchemaStorage()); >>>>>>> String requestUri = BASEURI + "/" + projectName + "/webroot/" + path + getQuery(parameters); MeshResponseHandler<Object> handler = new MeshResponseHandler<>(Object.class, HttpMethod.GET, requestUri, getClientSchemaStorage()); <<<<<<< return invokeRequest(POST, "/" + projectName + "/nodes/" + nodeUuid + "/languages/" + languageTag + "/fields/" + fieldKey, GenericMessageResponse.class, multiPartFormData, bodyContentType); ======= return handleRequest(POST, "/" + projectName + "/nodes/" + nodeUuid + "/bin", GenericMessageResponse.class, multiPartFormData, bodyContentType); >>>>>>> return handleRequest(POST, "/" + projectName + "/nodes/" + nodeUuid + "/languages/" + languageTag + "/fields/" + fieldKey, GenericMessageResponse.class, multiPartFormData, bodyContentType);
<<<<<<< import com.gentics.mesh.core.data.GraphFieldContainerEdge.Type; ======= import com.gentics.mesh.context.InternalActionContext; >>>>>>> import com.gentics.mesh.core.data.GraphFieldContainerEdge.Type; import com.gentics.mesh.context.InternalActionContext; <<<<<<< import com.gentics.mesh.graphdb.spi.TrxHandler; import com.gentics.mesh.handler.InternalActionContext; ======= import com.gentics.mesh.core.verticle.handler.HandlerUtilities; >>>>>>> import com.gentics.mesh.graphdb.spi.TrxHandler; import com.gentics.mesh.core.verticle.handler.HandlerUtilities; <<<<<<< PageImpl<? extends Node> page = node.getChildren(ac.getUser(), ac.getSelectedLanguageTags(), ac.getRelease().getUuid(), Type.forVersion(ac.getVersion()), ac.getPagingParameter()); return page.transformToRest(ac); ======= PageImpl<? extends Node> page = node.getChildren(ac.getUser(), ac.getSelectedLanguageTags(), ac.getPagingParameter()); return page.transformToRest(ac, 0); >>>>>>> PageImpl<? extends Node> page = node.getChildren(ac.getUser(), ac.getSelectedLanguageTags(), ac.getRelease().getUuid(), Type.forVersion(ac.getVersion()), ac.getPagingParameter()); return page.transformToRest(ac, 0); <<<<<<< // TODO get release specific containers SearchQueueBatch batch = node.addIndexBatch(UPDATE_ACTION); ======= SearchQueueBatch batch = node.createIndexBatch(STORE_ACTION); >>>>>>> SearchQueueBatch batch = node.createIndexBatch(STORE_ACTION); <<<<<<< // TODO get release specific containers SearchQueueBatch batch = node.addIndexBatch(SearchQueueEntryAction.UPDATE_ACTION); ======= SearchQueueBatch batch = node.createIndexBatch(STORE_ACTION); >>>>>>> // TODO get release specific containers SearchQueueBatch batch = node.createIndexBatch(STORE_ACTION);
<<<<<<< String uuid = db().tx(() -> folder("news").getUuid()); VersionReference version = db().tx(() -> { ======= String uuid = db().noTx(() -> folder("news").getUuid()); String version = db().noTx(() -> { >>>>>>> String uuid = db().tx(() -> folder("news").getUuid()); String version = db().tx(() -> { <<<<<<< () -> client().transformNodeBinaryField(PROJECT_NAME, uuid, "en", version.getNumber(), "image", params)); assertEquals("The image should have been resized", 100, transformResponse.getFields().getBinaryField("image").getWidth().intValue()); ======= () -> client().transformNodeBinaryField(PROJECT_NAME, uuid, "en", version, "image", params)); assertEquals("The image should have been resized", 100, transformResponse.getFields() .getBinaryField("image") .getWidth() .intValue()); >>>>>>> () -> client().transformNodeBinaryField(PROJECT_NAME, uuid, "en", version, "image", params)); assertEquals("The image should have been resized", 100, transformResponse.getFields().getBinaryField("image").getWidth().intValue()); <<<<<<< call(() -> client().transformNodeBinaryField(PROJECT_NAME, node.getUuid(), "en", response.getVersion().getNumber(), "image", params), BAD_REQUEST, "error_no_image_transformation", "image"); ======= call(() -> client().transformNodeBinaryField(PROJECT_NAME, node.getUuid(), "en", response.getVersion(), "image", params), BAD_REQUEST, "error_no_image_transformation", "image"); >>>>>>> call(() -> client().transformNodeBinaryField(PROJECT_NAME, node.getUuid(), "en", response.getVersion(), "image", params), BAD_REQUEST, "error_no_image_transformation", "image"); <<<<<<< version = node.getGraphFieldContainer("en").getVersion(); tx.success(); ======= VersionNumber version = node.getGraphFieldContainer("en") .getVersion(); NodeResponse response = call(() -> client().updateNodeBinaryField(PROJECT_NAME, node.getUuid(), "en", version.toString(), "image", Buffer.buffer("I am not an image"), "test.txt", "text/plain")); ImageManipulationParameters params = new ImageManipulationParametersImpl().setWidth(100); call(() -> client().transformNodeBinaryField(PROJECT_NAME, node.getUuid(), "en", response.getVersion(), "image", params), BAD_REQUEST, "error_transformation_non_image", "image"); >>>>>>> version = node.getGraphFieldContainer("en").getVersion(); tx.success();
<<<<<<< public void setAuthentication(JWTAuthentication authentication) { ======= /** * Set the authentication provider. * * @param authentication */ public void setAuthentication(MeshRestClientAuthenticationProvider authentication) { >>>>>>> /** * Set the authentication provider. * * @param authentication */ public void setAuthentication(JWTAuthentication authentication) { <<<<<<< public JWTAuthentication getAuthentication() { return authentication; } ======= /** * Prepare the request using the provided information and return a mesh request which is ready to be invoked. * * @param method * Http method * @param path * Request path * @param classOfT * POJO class for the response * @param bodyData * Buffer which contains the body data which should be sent to the server * @param contentType * Content type of the posted data * @return */ >>>>>>> /** * Get the authentication provider. * * @return */ public JWTAuthentication getAuthentication() { return authentication; } /** * Prepare the request using the provided information and return a mesh request which is ready to be invoked. * * @param method * Http method * @param path * Request path * @param classOfT * POJO class for the response * @param bodyData * Buffer which contains the body data which should be sent to the server * @param contentType * Content type of the posted data * @return */
<<<<<<< // TODO add filtering for query nodes ======= // set the languageTags as context parameters for the search gc.getNodeParameters().setLanguages(languageTags.stream().toArray(String[]::new)); >>>>>>> // TODO add filtering for query nodes gc.getNodeParameters().setLanguages(languageTags.stream().toArray(String[]::new)); <<<<<<< return fetchFilteredNodes(env); ======= NodeRoot nodeRoot = gc.getProject().getNodeRoot(); TransformablePage<? extends Node> nodes = nodeRoot.findAll(gc, pagingInfo); // Now lets try to load the containers for those found nodes and apply the language fallback List<NodeContent> contents = nodes.getWrappedList().stream().map(node -> { NodeGraphFieldContainer container = node.findVersion(gc, languageTags); return new NodeContent(node, container); }).collect(Collectors.toList()); return new WrappedPageImpl<NodeContent>(contents, nodes); >>>>>>> return fetchFilteredNodes(env);
<<<<<<< import org.n52.sos.ds.hibernate.entities.observation.ereporting.EReportingSeries; import org.n52.sos.ogc.gml.AbstractFeature; import org.n52.sos.ogc.gml.time.TimeInstant; import org.n52.sos.ogc.om.OmObservableProperty; ======= import org.n52.sos.ds.hibernate.entities.ereporting.EReportingSeries; >>>>>>> import org.n52.sos.ds.hibernate.entities.observation.ereporting.EReportingSeries;
<<<<<<< import com.gentics.mesh.core.data.project.HibProject; ======= import com.gentics.mesh.core.data.Project; import com.gentics.mesh.core.rest.MeshEvent; >>>>>>> import com.gentics.mesh.core.data.project.HibProject; import com.gentics.mesh.core.rest.MeshEvent; <<<<<<< private static EventAwareCache<String, HibProject> createCache(EventAwareCacheFactory factory) { return factory.<String, HibProject>builder() .events(PROJECT_DELETED, PROJECT_UPDATED) ======= private static EventAwareCache<String, Project> createCache(EventAwareCacheFactory factory) { return factory.<String, Project>builder() .events(EVENTS) >>>>>>> private static EventAwareCache<String, HibProject> createCache(EventAwareCacheFactory factory) { return factory.<String, HibProject>builder() .events(EVENTS)
<<<<<<< envMap.put(HttpServerConfig.MESH_HTTP_CORS_ORIGIN_PATTERN_ENV, "*"); envMap.put(HttpServerConfig.MESH_HTTP_CORS_ENABLE_ENV, "true"); ======= envMap.put(MeshOptions.MESH_CLUSTER_INIT_ENV, "true"); >>>>>>> envMap.put(MeshOptions.MESH_CLUSTER_INIT_ENV, "true"); envMap.put(HttpServerConfig.MESH_HTTP_CORS_ORIGIN_PATTERN_ENV, "*"); envMap.put(HttpServerConfig.MESH_HTTP_CORS_ENABLE_ENV, "true"); <<<<<<< assertTrue(options.getHttpServerOptions().getEnableCors()); assertEquals("*", options.getHttpServerOptions().getCorsAllowedOriginPattern()); ======= assertTrue(options.isInitClusterMode()); >>>>>>> assertTrue(options.isInitClusterMode()); assertTrue(options.getHttpServerOptions().getEnableCors()); assertEquals("*", options.getHttpServerOptions().getCorsAllowedOriginPattern());
<<<<<<< import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import com.gentics.mesh.core.data.GraphFieldContainerEdge.Type; ======= import com.gentics.mesh.context.InternalActionContext; >>>>>>> import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import com.gentics.mesh.core.data.GraphFieldContainerEdge.Type; import com.gentics.mesh.context.InternalActionContext; <<<<<<< import com.gentics.mesh.handler.InternalActionContext; import com.syncleus.ferma.traversals.EdgeTraversal; ======= import com.gentics.mesh.search.index.NodeIndexHandler; >>>>>>> import com.syncleus.ferma.traversals.EdgeTraversal; import com.gentics.mesh.search.index.NodeIndexHandler; <<<<<<< @Override public void setVersion(VersionNumber version) { setProperty(VERSION_PROPERTY_KEY, version.toString()); } @Override public VersionNumber getVersion() { String version = getProperty(VERSION_PROPERTY_KEY); return version == null ? null : new VersionNumber(version); } @Override public NodeGraphFieldContainer getNextVersion() { return out(HAS_VERSION).has(NodeGraphFieldContainerImpl.class).nextOrDefaultExplicit(NodeGraphFieldContainerImpl.class, null); } @Override public void setNextVersion(NodeGraphFieldContainer container) { setSingleLinkOutTo(container.getImpl(), HAS_VERSION); } @Override public NodeGraphFieldContainer getPreviousVersion() { return in(HAS_VERSION).has(NodeGraphFieldContainerImpl.class).nextOrDefaultExplicit(NodeGraphFieldContainerImpl.class, null); } @Override public void clone(NodeGraphFieldContainer container) { List<GraphField> otherFields = container.getFields(container.getSchemaContainerVersion().getSchema()); for (GraphField graphField : otherFields) { graphField.cloneTo(this); } } @Override public boolean isPublished(String releaseUuid) { EdgeTraversal<?, ?, ?> traversal = inE(HAS_FIELD_CONTAINER) .has(GraphFieldContainerEdgeImpl.RELEASE_UUID_KEY, releaseUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, Type.PUBLISHED.getCode()); return traversal.hasNext(); } @Override public void validate() { Schema schema = getSchemaContainerVersion().getSchema(); Map<String, GraphField> fieldsMap = getFields(schema).stream() .collect(Collectors.toMap(GraphField::getFieldKey, Function.identity())); schema.getFields().stream().forEach(fieldSchema -> { GraphField field = fieldsMap.get(fieldSchema.getName()); if (fieldSchema.isRequired() && field == null) { throw error(CONFLICT, "node_error_missing_mandatory_field_value", fieldSchema.getName(), schema.getName()); } if (field != null) { field.validate(); } }); } ======= @Override public void addIndexBatchEntry(SearchQueueBatch batch, SearchQueueEntryAction action) { String indexType = NodeIndexHandler.getDocumentType(getSchemaContainerVersion()); batch.addEntry(getParentNode().getUuid() + "-" + getLanguage().getLanguageTag(), getParentNode().getType(), action, indexType); } >>>>>>> @Override public void setVersion(VersionNumber version) { setProperty(VERSION_PROPERTY_KEY, version.toString()); } @Override public VersionNumber getVersion() { String version = getProperty(VERSION_PROPERTY_KEY); return version == null ? 
null : new VersionNumber(version); } @Override public NodeGraphFieldContainer getNextVersion() { return out(HAS_VERSION).has(NodeGraphFieldContainerImpl.class).nextOrDefaultExplicit(NodeGraphFieldContainerImpl.class, null); } @Override public void setNextVersion(NodeGraphFieldContainer container) { setSingleLinkOutTo(container.getImpl(), HAS_VERSION); } @Override public NodeGraphFieldContainer getPreviousVersion() { return in(HAS_VERSION).has(NodeGraphFieldContainerImpl.class).nextOrDefaultExplicit(NodeGraphFieldContainerImpl.class, null); } @Override public void clone(NodeGraphFieldContainer container) { List<GraphField> otherFields = container.getFields(container.getSchemaContainerVersion().getSchema()); for (GraphField graphField : otherFields) { graphField.cloneTo(this); } } @Override public boolean isPublished(String releaseUuid) { EdgeTraversal<?, ?, ?> traversal = inE(HAS_FIELD_CONTAINER) .has(GraphFieldContainerEdgeImpl.RELEASE_UUID_KEY, releaseUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, Type.PUBLISHED.getCode()); return traversal.hasNext(); } @Override public void validate() { Schema schema = getSchemaContainerVersion().getSchema(); Map<String, GraphField> fieldsMap = getFields(schema).stream() .collect(Collectors.toMap(GraphField::getFieldKey, Function.identity())); schema.getFields().stream().forEach(fieldSchema -> { GraphField field = fieldsMap.get(fieldSchema.getName()); if (fieldSchema.isRequired() && field == null) { throw error(CONFLICT, "node_error_missing_mandatory_field_value", fieldSchema.getName(), schema.getName()); } if (field != null) { field.validate(); } }); } @Override public void addIndexBatchEntry(SearchQueueBatch batch, SearchQueueEntryAction action) { String indexType = NodeIndexHandler.getDocumentType(getSchemaContainerVersion()); batch.addEntry(getParentNode().getUuid() + "-" + getLanguage().getLanguageTag(), getParentNode().getType(), action, indexType); }
<<<<<<< import java.util.Optional; ======= import com.fasterxml.jackson.annotation.JsonProperty; >>>>>>> import java.util.Optional; import com.fasterxml.jackson.annotation.JsonProperty; <<<<<<< @Override public Optional<SchemaChangeModelImpl> compareTo(FieldSchema fieldSchema) { if (fieldSchema instanceof StringFieldSchema) { StringFieldSchema stringFieldSchema = (StringFieldSchema) fieldSchema; if (isRequired() != stringFieldSchema.isRequired()) { return Optional.of(new SchemaChangeModelImpl().setOperation(SchemaChangeOperation.UPDATEFIELD)); } } else { //TODO type change } return Optional.empty(); } ======= @Override public String[] getAllowedValues() { return allowedValues; } @Override public void setAllowedValues(String[] allowedValues) { this.allowedValues = allowedValues; } >>>>>>> @Override public Optional<SchemaChangeModelImpl> compareTo(FieldSchema fieldSchema) { if (fieldSchema instanceof StringFieldSchema) { StringFieldSchema stringFieldSchema = (StringFieldSchema) fieldSchema; if (isRequired() != stringFieldSchema.isRequired()) { return Optional.of(new SchemaChangeModelImpl().setOperation(SchemaChangeOperation.UPDATEFIELD)); } } else { //TODO type change } return Optional.empty(); } @Override public String[] getAllowedValues() { return allowedValues; } @Override public void setAllowedValues(String[] allowedValues) { this.allowedValues = allowedValues; }
<<<<<<< import com.gentics.mesh.core.rest.node.field.list.impl.HtmlFieldListImpl; ======= import com.gentics.mesh.core.data.search.SearchQueueBatch; >>>>>>> import com.gentics.mesh.core.rest.node.field.list.impl.HtmlFieldListImpl; import com.gentics.mesh.core.data.search.SearchQueueBatch;
<<<<<<< import static com.gentics.mesh.search.index.MappingHelper.ANALYZED; import static com.gentics.mesh.search.index.MappingHelper.BOOLEAN; import static com.gentics.mesh.search.index.MappingHelper.DATE; import static com.gentics.mesh.search.index.MappingHelper.DOUBLE; import static com.gentics.mesh.search.index.MappingHelper.LONG; import static com.gentics.mesh.search.index.MappingHelper.NAME_KEY; import static com.gentics.mesh.search.index.MappingHelper.NESTED; import static com.gentics.mesh.search.index.MappingHelper.NOT_ANALYZED; import static com.gentics.mesh.search.index.MappingHelper.OBJECT; import static com.gentics.mesh.search.index.MappingHelper.STRING; import static com.gentics.mesh.search.index.MappingHelper.TRIGRAM_ANALYZER; import static com.gentics.mesh.search.index.MappingHelper.UUID_KEY; import static com.gentics.mesh.search.index.MappingHelper.notAnalyzedType; import static com.gentics.mesh.search.index.MappingHelper.trigramStringType; import static com.gentics.mesh.util.DateUtils.toISO8601; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.inject.Inject; import javax.inject.Singleton; import org.apache.commons.lang3.NotImplementedException; ======= >>>>>>> import static com.gentics.mesh.search.index.MappingHelper.ANALYZED; import static com.gentics.mesh.search.index.MappingHelper.BOOLEAN; import static com.gentics.mesh.search.index.MappingHelper.DATE; import static com.gentics.mesh.search.index.MappingHelper.DOUBLE; import static com.gentics.mesh.search.index.MappingHelper.LONG; import static com.gentics.mesh.search.index.MappingHelper.NAME_KEY; import static com.gentics.mesh.search.index.MappingHelper.NESTED; import static com.gentics.mesh.search.index.MappingHelper.NOT_ANALYZED; import static com.gentics.mesh.search.index.MappingHelper.OBJECT; import static com.gentics.mesh.search.index.MappingHelper.STRING; import static com.gentics.mesh.search.index.MappingHelper.TRIGRAM_ANALYZER; import static com.gentics.mesh.search.index.MappingHelper.UUID_KEY; import static com.gentics.mesh.search.index.MappingHelper.notAnalyzedType; import static com.gentics.mesh.search.index.MappingHelper.trigramStringType; import static com.gentics.mesh.util.DateUtils.toISO8601; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.inject.Inject; import javax.inject.Singleton; import org.apache.commons.lang3.NotImplementedException;
<<<<<<< void markAllAsApplied(MeshRoot meshRoot); ======= public void markAllAsApplied(MeshRoot meshRoot) { db.tx(tx -> { List<HighLevelChange> changes = highLevelChangesList.getList(); for (HighLevelChange change : changes) { meshRoot.getChangelogRoot().add(change, 0); } tx.success(); }); } /** * Check whether any high level changelog entry needs to be applied. * * @param meshRoot * @return */ public boolean requiresChanges(MeshRoot meshRoot) { return db.tx(tx -> { List<HighLevelChange> changes = highLevelChangesList.getList(); for (HighLevelChange change : changes) { if (!isApplied(meshRoot, change)) { return true; } } return false; }); } >>>>>>> void markAllAsApplied(MeshRoot meshRoot); /** * Check whether any high level changelog entry needs to be applied. * * @param meshRoot * @return */ boolean requiresChanges(MeshRoot meshRoot);
<<<<<<< private Database database; private final boolean clustered; ======= private final Database database; private final SearchQueue searchQueue; private final MetricsService metrics; >>>>>>> private final Database database; private final MetricsService metrics; private final boolean clustered; <<<<<<< public HandlerUtilities(Database database, MeshOptions meshOptions) { ======= public HandlerUtilities(Database database, SearchQueue searchQueue, MeshOptions meshOptions, MetricsService metrics) { this.searchQueue = searchQueue; >>>>>>> public HandlerUtilities(Database database, MeshOptions meshOptions, MetricsService metrics) {
<<<<<<< Project project = MeshInternal.get().boot().projectRoot().findByName(TestDataProvider.PROJECT_NAME); ac.data().put(RouterStorage.PROJECT2_CONTEXT_KEY, project); ======= ac.data().put(RouterStorage.PROJECT_CONTEXT_KEY, TestFullDataProvider.PROJECT_NAME); >>>>>>> ac.data().put(RouterStorage.PROJECT2_CONTEXT_KEY, TestFullDataProvider.PROJECT_NAME); <<<<<<< ac.data().put(RouterStorage.PROJECT2_CONTEXT_KEY, MeshInternal.get().boot().projectRoot().findByName(TestDataProvider.PROJECT_NAME)); ======= ac.data().put(RouterStorage.PROJECT_CONTEXT_KEY, TestFullDataProvider.PROJECT_NAME); >>>>>>> ac.data().put(RouterStorage.PROJECT2_CONTEXT_KEY, TestFullDataProvider.PROJECT_NAME);
<<<<<<< ContentDaoWrapper contentDao = tx.contentDao(); String path = "/News/2015/blume.jpg"; HibNode node = content("news_2015"); ======= ContentDaoWrapper contentDao = tx.data().contentDao(); node = content("news_2015"); >>>>>>> ContentDaoWrapper contentDao = tx.contentDao(); node = content("news_2015");
<<<<<<< String pathSegementFieldValue = newsNode.getPathSegment(project().getLatestBranch().getUuid(), ContainerType.DRAFT, english() .getLanguageTag()); ======= String pathSegementFieldValue = newsNode.getPathSegment(project().getLatestRelease().getUuid(), ContainerType.DRAFT, english() .getLanguageTag()); >>>>>>> String pathSegementFieldValue = newsNode.getPathSegment(project().getLatestBranch().getUuid(), ContainerType.DRAFT, english() .getLanguageTag()); <<<<<<< subNode.deleteFromBranch(ac, project().getLatestBranch(), batch, false); ======= subNode.deleteFromRelease(ac, project().getLatestRelease(), context, false); >>>>>>> subNode.deleteFromBranch(ac, project().getLatestBranch(), context, false); <<<<<<< node.deleteFromBranch(ac, project().getLatestBranch(), batch, false); ======= node.deleteFromRelease(ac, project().getLatestRelease(), context, false); >>>>>>> node.deleteFromBranch(ac, project().getLatestBranch(), context, false); <<<<<<< project.getNodeRoot().findAll(mockActionContext("branch=" + initialBranch.getName()), new PagingParametersImpl(1, 10000, "name", SortOrder.ASCENDING)).forEach(node -> nodes.add(node)); assertThat(nodes).as("Nodes in initial branch").usingElementComparatorOnFields("uuid").doesNotContain(subFolder, subSubFolder); assertThat(folder).as("folder").hasNoChildren(initialBranch); ======= project.getNodeRoot().findAll(mockActionContext("release=" + initialRelease.getName()), new PagingParametersImpl(1, 10000, "name", SortOrder.ASCENDING)).forEach(node -> nodes.add(node)); assertThat(nodes).as("Nodes in initial release").usingElementComparatorOnFields("uuid").doesNotContain(subFolder, subSubFolder); assertThat(folder).as("folder").hasNoChildren(initialRelease); >>>>>>> project.getNodeRoot().findAll(mockActionContext("release=" + initialBranch.getName()), new PagingParametersImpl(1, 10000, "name", SortOrder.ASCENDING)).forEach(node -> nodes.add(node)); assertThat(nodes).as("Nodes in initial release").usingElementComparatorOnFields("uuid").doesNotContain(subFolder, subSubFolder); assertThat(folder).as("folder").hasNoChildren(initialBranch); <<<<<<< SearchQueueBatch batch = createBatch(); folder.applyPermissions(batch, role(), false, new HashSet<>(Arrays.asList(GraphPermission.READ_PERM, GraphPermission.READ_PUBLISHED_PERM)), Collections.emptySet()); folder.createGraphFieldContainer(english(), initialBranch, user()).createString("name").setString("Folder"); folder.publish(mockActionContext(), batch); ======= BulkActionContext bac = createBulkContext(); folder.applyPermissions(bac.batch(), role(), false, new HashSet<>(Arrays.asList(GraphPermission.READ_PERM, GraphPermission.READ_PUBLISHED_PERM)), Collections.emptySet()); folder.createGraphFieldContainer(english(), initialRelease, user()).createString("name").setString("Folder"); folder.publish(mockActionContext(), bac); >>>>>>> BulkActionContext bac = createBulkContext(); folder.applyPermissions(bac.batch(), role(), false, new HashSet<>(Arrays.asList(GraphPermission.READ_PERM, GraphPermission.READ_PUBLISHED_PERM)), Collections.emptySet()); folder.createGraphFieldContainer(english(), initialBranch, user()).createString("name").setString("Folder"); folder.publish(mockActionContext(), bac); <<<<<<< SearchQueueBatch batch = db().tx(() -> { SearchQueueBatch innerBatch = createBatch(); meshRoot().getNodeRoot().findByUuid(folderUuid).deleteFromBranch(ac, initialBranch, innerBatch, false); return innerBatch; ======= BulkActionContext context = db().tx(() -> { BulkActionContext innerContext = createBulkContext(); 
meshRoot().getNodeRoot().findByUuid(folderUuid).deleteFromRelease(ac, initialRelease, innerContext, false); return innerContext; >>>>>>> BulkActionContext context = db().tx(() -> { BulkActionContext innerContext = createBulkContext(); meshRoot().getNodeRoot().findByUuid(folderUuid).deleteFromBranch(ac, initialBranch, innerContext, false); return innerContext; <<<<<<< initialBranch.getUuid(), ContainerType.DRAFT, "en")); ======= initialRelease.getUuid(), ContainerType.DRAFT, "en")); >>>>>>> initialBranch.getUuid(), ContainerType.DRAFT, "en")); <<<<<<< initialBranch.getUuid(), ContainerType.PUBLISHED, "en")); ======= initialRelease.getUuid(), ContainerType.PUBLISHED, "en")); >>>>>>> initialBranch.getUuid(), ContainerType.PUBLISHED, "en")); <<<<<<< SearchQueueBatch batch = createBatch(); folder.applyPermissions(batch, role(), false, new HashSet<>(Arrays.asList(GraphPermission.READ_PERM, GraphPermission.READ_PUBLISHED_PERM)), Collections.emptySet()); folder.createGraphFieldContainer(english(), initialBranch, user()).createString("name").setString("Folder"); folder.publish(mockActionContext(), batch); ======= BulkActionContext bac = createBulkContext(); folder.applyPermissions(bac.batch(), role(), false, new HashSet<>(Arrays.asList(GraphPermission.READ_PERM, GraphPermission.READ_PUBLISHED_PERM)), Collections.emptySet()); folder.createGraphFieldContainer(english(), initialRelease, user()).createString("name").setString("Folder"); folder.publish(mockActionContext(), bac); >>>>>>> BulkActionContext bac = createBulkContext(); folder.applyPermissions(bac.batch(), role(), false, new HashSet<>(Arrays.asList(GraphPermission.READ_PERM, GraphPermission.READ_PUBLISHED_PERM)), Collections.emptySet()); folder.createGraphFieldContainer(english(), initialBranch, user()).createString("name").setString("Folder"); folder.publish(mockActionContext(), bac); <<<<<<< SearchQueueBatch batch = tx(() -> { SearchQueueBatch innerBatch = createBatch(); meshRoot().getNodeRoot().findByUuid(folderUuid).deleteFromBranch(ac, initialBranch, innerBatch, false); return innerBatch; ======= BulkActionContext context = createBulkContext(); tx(() -> { meshRoot().getNodeRoot().findByUuid(folderUuid).deleteFromRelease(ac, initialRelease, context, false); >>>>>>> BulkActionContext context = createBulkContext(); tx(() -> { meshRoot().getNodeRoot().findByUuid(folderUuid).deleteFromBranch(ac, initialBranch, context, false); <<<<<<< initialBranch.getUuid(), ContainerType.DRAFT, "en")); ======= initialRelease.getUuid(), ContainerType.DRAFT, "en")); >>>>>>> initialBranch.getUuid(), ContainerType.DRAFT, "en")); <<<<<<< initialBranch.getUuid(), ContainerType.PUBLISHED, "en")); assertThat(batch).containsEntries(expectedEntries); ======= initialRelease.getUuid(), ContainerType.PUBLISHED, "en")); assertThat(context.batch()).containsEntries(expectedEntries); >>>>>>> initialBranch.getUuid(), ContainerType.PUBLISHED, "en")); assertThat(context.batch()).containsEntries(expectedEntries);
<<<<<<< /** * Return the field schema with the given name. * * @param fieldName * @return */ Optional<FieldSchema> getFieldSchema(String fieldName); /** * Return the schema version. * * @return */ String getVersion(); /** * Set the schema version. * * @param version */ void setVersion(String version); ======= >>>>>>> /** * Return the schema version. * * @return */ String getVersion(); /** * Set the schema version. * * @param version */ void setVersion(String version);
<<<<<<< import static org.assertj.core.api.Assertions.assertThat; ======= import static com.gentics.mesh.util.MeshAssert.latchFor; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; >>>>>>> import static com.gentics.mesh.util.MeshAssert.latchFor; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static org.assertj.core.api.Assertions.assertThat; <<<<<<< NodeResponse response = createNode(null, (Field) null); NodeFieldListImpl nodeField = response.getFields().getNodeListField(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck(null, (Field) null); NodeFieldListImpl nodeField = response.getFields().getNodeListField("listField"); >>>>>>> NodeResponse response = createNode(null, (Field) null); NodeFieldListImpl nodeField = response.getFields().getNodeListField(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, (Field) null); StringFieldListImpl nodeField = response.getFields().getStringFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", (Field) null); StringFieldListImpl nodeField = response.getFields().getStringFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, (Field) null); StringFieldListImpl nodeField = response.getFields().getStringFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(null, (Field) null); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck(null, (Field) null); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList("listField"); >>>>>>> NodeResponse response = createNode(null, (Field) null); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); HtmlFieldListImpl listFromResponse = response.getFields().getHtmlFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); HtmlFieldListImpl listFromResponse = 
response.getFields().getHtmlFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); HtmlFieldListImpl listFromResponse = response.getFields().getHtmlFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); BooleanFieldListImpl listFromResponse = response.getFields().getBooleanListField(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); BooleanFieldListImpl listFromResponse = response.getFields().getBooleanListField("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); BooleanFieldListImpl listFromResponse = response.getFields().getBooleanListField(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); DateFieldListImpl listFromResponse = response.getFields().getDateFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); DateFieldListImpl listFromResponse = response.getFields().getDateFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); DateFieldListImpl listFromResponse = response.getFields().getDateFieldList(FIELD_NAME); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); NumberFieldListImpl listFromResponse = response.getFields().getNumberFieldList(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); NumberFieldListImpl listFromResponse = response.getFields().getNumberFieldList("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); NumberFieldListImpl listFromResponse = response.getFields().getNumberFieldList(FIELD_NAME); <<<<<<< List<List<String>> valueCombinations = Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList("C", "B", "A"), Collections.emptyList(), Arrays.asList("X", "Y"), Arrays.asList("C")); for (int i = 0; i < 20; i++) { NodeGraphFieldContainer container = node.getGraphFieldContainer("en"); List<String> oldValue = getListValues(container, StringGraphFieldListImpl.class, FIELD_NAME); List<String> newValue = valueCombinations.get(i % valueCombinations.size()); StringFieldListImpl list = new StringFieldListImpl(); for (String value : newValue) { list.add(value); } NodeResponse response = updateNode(FIELD_NAME, list); StringFieldListImpl field = response.getFields().getStringFieldList(FIELD_NAME); assertThat(field.getItems()).as("Updated field").containsExactlyElementsOf(list.getItems()); node.reload(); container.reload(); assertEquals("Check version number", container.getVersion().nextDraft().toString(), response.getVersion().getNumber()); assertEquals("Check old value", oldValue, getListValues(container, StringGraphFieldListImpl.class, FIELD_NAME)); ======= StringFieldListImpl listField = new StringFieldListImpl(); listField.add("A"); listField.add("B"); listField.add("C"); NodeResponse response = createNodeAndCheck("listField", listField); StringFieldListImpl listFromResponse = response.getFields().getStringFieldList("listField"); assertEquals(3, listFromResponse.getItems().size()); for (int i = 0; i < listField.getItems().size(); i++) { assertEquals("Check item #" + (i + 1), listField.getItems().get(i), listFromResponse.getItems().get(i)); >>>>>>> List<List<String>> valueCombinations = Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList("C", "B", "A"), Collections.emptyList(), Arrays.asList("X", "Y"), Arrays.asList("C")); for (int i = 0; i < 20; i++) { NodeGraphFieldContainer container = node.getGraphFieldContainer("en"); List<String> oldValue = 
getListValues(container, StringGraphFieldListImpl.class, FIELD_NAME); List<String> newValue = valueCombinations.get(i % valueCombinations.size()); StringFieldListImpl list = new StringFieldListImpl(); for (String value : newValue) { list.add(value); } NodeResponse response = updateNode(FIELD_NAME, list); StringFieldListImpl field = response.getFields().getStringFieldList(FIELD_NAME); assertThat(field.getItems()).as("Updated field").containsExactlyElementsOf(list.getItems()); node.reload(); container.reload(); assertEquals("Check version number", container.getVersion().nextDraft().toString(), response.getVersion().getNumber()); assertEquals("Check old value", oldValue, getListValues(container, StringGraphFieldListImpl.class, FIELD_NAME)); <<<<<<< List<List<String>> valueCombinations = Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList("C", "B", "A"), Collections.emptyList(), Arrays.asList("X", "Y"), Arrays.asList("C")); for (int i = 0; i < 20; i++) { NodeGraphFieldContainer container = node.getGraphFieldContainer("en"); List<String> oldValue = getListValues(container, HtmlGraphFieldListImpl.class, FIELD_NAME); List<String> newValue = valueCombinations.get(i % valueCombinations.size()); HtmlFieldListImpl list = new HtmlFieldListImpl(); for (String value : newValue) { list.add(value); } NodeResponse response = updateNode(FIELD_NAME, list); HtmlFieldListImpl field = response.getFields().getHtmlFieldList(FIELD_NAME); assertThat(field.getItems()).as("Updated field").containsExactlyElementsOf(list.getItems()); node.reload(); container.reload(); assertEquals("Check version number", container.getVersion().nextDraft().toString(), response.getVersion().getNumber()); assertEquals("Check old value", oldValue, getListValues(container, HtmlGraphFieldListImpl.class, FIELD_NAME)); } ======= HtmlFieldListImpl listField = new HtmlFieldListImpl(); listField.add("A"); listField.add("B"); listField.add("C"); NodeResponse response = createNodeAndCheck("listField", listField); HtmlFieldListImpl listFromResponse = response.getFields().getHtmlFieldList("listField"); assertEquals(3, listFromResponse.getItems().size()); // Add another item to the list and update the node listField.add("D"); response = updateNode("listField", listField); listFromResponse = response.getFields().getHtmlFieldList("listField"); assertEquals(4, listFromResponse.getItems().size()); >>>>>>> List<List<String>> valueCombinations = Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList("C", "B", "A"), Collections.emptyList(), Arrays.asList("X", "Y"), Arrays.asList("C")); for (int i = 0; i < 20; i++) { NodeGraphFieldContainer container = node.getGraphFieldContainer("en"); List<String> oldValue = getListValues(container, HtmlGraphFieldListImpl.class, FIELD_NAME); List<String> newValue = valueCombinations.get(i % valueCombinations.size()); HtmlFieldListImpl list = new HtmlFieldListImpl(); for (String value : newValue) { list.add(value); } NodeResponse response = updateNode(FIELD_NAME, list); HtmlFieldListImpl field = response.getFields().getHtmlFieldList(FIELD_NAME); assertThat(field.getItems()).as("Updated field").containsExactlyElementsOf(list.getItems()); node.reload(); container.reload(); assertEquals("Check version number", container.getVersion().nextDraft().toString(), response.getVersion().getNumber()); assertEquals("Check old value", oldValue, getListValues(container, HtmlGraphFieldListImpl.class, FIELD_NAME)); } <<<<<<< List<List<Long>> valueCombinations = Arrays.asList(Arrays.asList(1L, 2L, 3L), Arrays.asList(3L, 2L, 1L), 
Collections.emptyList(), Arrays.asList(4711L, 815L), Arrays.asList(3L)); for (int i = 0; i < 20; i++) { NodeGraphFieldContainer container = node.getGraphFieldContainer("en"); List<Long> oldValue = getListValues(container, DateGraphFieldListImpl.class, FIELD_NAME); List<Long> newValue = valueCombinations.get(i % valueCombinations.size()); DateFieldListImpl list = new DateFieldListImpl(); for (Long value : newValue) { list.add(value); } NodeResponse response = updateNode(FIELD_NAME, list); DateFieldListImpl field = response.getFields().getDateFieldList(FIELD_NAME); assertThat(field.getItems()).as("Updated field").containsExactlyElementsOf(list.getItems()); node.reload(); container.reload(); assertEquals("Check version number", container.getVersion().nextDraft().toString(), response.getVersion().getNumber()); assertEquals("Check old value", oldValue, getListValues(container, DateGraphFieldListImpl.class, FIELD_NAME)); } ======= DateFieldListImpl listField = new DateFieldListImpl(); listField.add(1L); listField.add(2L); listField.add(3L); NodeResponse response = createNodeAndCheck("listField", listField); DateFieldListImpl listFromResponse = response.getFields().getDateFieldList("listField"); assertEquals(3, listFromResponse.getItems().size()); // Add another item to the list and update the node listField.add(4L); response = updateNode("listField", listField); listFromResponse = response.getFields().getDateFieldList("listField"); assertEquals(4, listFromResponse.getItems().size()); >>>>>>> List<List<Long>> valueCombinations = Arrays.asList(Arrays.asList(1L, 2L, 3L), Arrays.asList(3L, 2L, 1L), Collections.emptyList(), Arrays.asList(4711L, 815L), Arrays.asList(3L)); for (int i = 0; i < 20; i++) { NodeGraphFieldContainer container = node.getGraphFieldContainer("en"); List<Long> oldValue = getListValues(container, DateGraphFieldListImpl.class, FIELD_NAME); List<Long> newValue = valueCombinations.get(i % valueCombinations.size()); DateFieldListImpl list = new DateFieldListImpl(); for (Long value : newValue) { list.add(value); } NodeResponse response = updateNode(FIELD_NAME, list); DateFieldListImpl field = response.getFields().getDateFieldList(FIELD_NAME); assertThat(field.getItems()).as("Updated field").containsExactlyElementsOf(list.getItems()); node.reload(); container.reload(); assertEquals("Check version number", container.getVersion().nextDraft().toString(), response.getVersion().getNumber()); assertEquals("Check old value", oldValue, getListValues(container, DateGraphFieldListImpl.class, FIELD_NAME)); } <<<<<<< @Test public void testUpdateNodeWithBooleanField() throws IOException { setSchema("boolean"); Node node = folder("2015"); ======= NodeResponse response = createNodeAndCheck("listField", listField); NumberFieldListImpl listFromResponse = response.getFields().getNumberFieldList("listField"); assertEquals(3, listFromResponse.getItems().size()); >>>>>>> @Test public void testUpdateNodeWithBooleanField() throws IOException { setSchema("boolean"); Node node = folder("2015"); <<<<<<< NodeResponse response = createNode(FIELD_NAME, listField); NodeFieldListImpl listFromResponse = response.getFields().getNodeListField(FIELD_NAME); ======= NodeResponse response = createNodeAndCheck("listField", listField); NodeFieldListImpl listFromResponse = response.getFields().getNodeListField("listField"); >>>>>>> NodeResponse response = createNode(FIELD_NAME, listField); NodeFieldListImpl listFromResponse = response.getFields().getNodeListField(FIELD_NAME);
<<<<<<< import com.gentics.mesh.core.data.search.SearchQueueEntry; ======= import com.gentics.mesh.core.data.search.SearchQueueBatch; import com.gentics.mesh.core.data.search.SearchQueueEntry; >>>>>>> import com.gentics.mesh.core.data.search.SearchQueueBatch; import com.gentics.mesh.core.data.search.SearchQueueEntry; <<<<<<< * Update the search index document which is represented by the given object. * * @param object * @param entry * @return */ public Observable<Void> update(T object, SearchQueueEntry entry) { return searchProvider.updateDocument(getIndex(), getType(), object.getUuid(), transformToDocumentMap(object)); } @Override public Observable<Void> update(String uuid, String type, SearchQueueEntry entry) { ObservableFuture<Void> fut = RxHelper.observableFuture(); getRootVertex().findByUuid(uuid).map(element -> { if (element == null) { return Observable.error(new Exception("Element {" + uuid + "} for index type {" + type + "} could not be found within graph.")); } else { return update(element, entry); } }); return fut; } /** ======= >>>>>>> <<<<<<< public Observable<Void> delete(String uuid, String type, SearchQueueEntry entry) { ======= public Observable<Void> delete(String uuid, String documentType) { >>>>>>> public Observable<Void> delete(String uuid, String documentType) { <<<<<<< case CREATE_ACTION: return store(uuid, indexType, entry); ======= >>>>>>> <<<<<<< return delete(uuid, indexType, entry); case UPDATE_ACTION: // update(uuid, handler); return store(uuid, indexType, entry); ======= return delete(uuid, indexType); case STORE_ACTION: return store(uuid, indexType); case REINDEX_ALL: return reindexAll(); >>>>>>> return delete(uuid, indexType); case STORE_ACTION: return store(uuid, indexType); case REINDEX_ALL: return reindexAll();
<<<<<<< + fromContainerVersion.getUuid() + "} to version {" + toContainerVersion.getUuid() + "} for branch {" + branch.getUuid() + "} in project {" + project.getUuid() + "}"); ======= + fromContainerVersion.getUuid() + "} to version {" + toContainerVersion.getUuid() + "} for release {" + release.getUuid() + "} in project {" + project.getUuid() + "}"); >>>>>>> + fromContainerVersion.getUuid() + "} to version {" + toContainerVersion.getUuid() + "} for release {" + branch.getUuid() + "} in project {" + project.getUuid() + "}"); <<<<<<< MeshInternal.get().nodeMigrationHandler().migrateNodes(project, branch, fromContainerVersion, toContainerVersion, status).blockingAwait(); ======= MeshInternal.get().nodeMigrationHandler().migrateNodes(ac, project, release, fromContainerVersion, toContainerVersion, status) .blockingAwait(); >>>>>>> MeshInternal.get().nodeMigrationHandler().migrateNodes(ac, project, branch, fromContainerVersion, toContainerVersion, status) .blockingAwait(); <<<<<<< finalizeMigration(project, branch, fromContainerVersion); ======= JobWarningList warnings = new JobWarningList(); if (!ac.getConflicts().isEmpty()) { for (ConflictWarning conflict : ac.getConflicts()) { log.info("Encountered conflict {" + conflict + "} which was automatically resolved."); warnings.add(conflict); } } setWarnings(warnings); finalizeMigration(project, release, fromContainerVersion); >>>>>>> JobWarningList warnings = new JobWarningList(); if (!ac.getConflicts().isEmpty()) { for (ConflictWarning conflict : ac.getConflicts()) { log.info("Encountered conflict {" + conflict + "} which was automatically resolved."); warnings.add(conflict); } } setWarnings(warnings); finalizeMigration(project, branch, fromContainerVersion);
<<<<<<< checkOut(project, HAS_BRANCH_ROOT, BranchRootImpl.class, response, HIGH); checkOut(project, HAS_NODE_ROOT, NodeRootImpl.class, response, HIGH); checkOut(project, HAS_TAGFAMILY_ROOT, TagFamilyRootImpl.class, response, HIGH); checkOut(project, HAS_ROOT_NODE, NodeImpl.class, response, HIGH); checkOut(project, HAS_SCHEMA_ROOT, ProjectSchemaContainerRootImpl.class, response, HIGH); checkOut(project, HAS_MICROSCHEMA_ROOT, ProjectMicroschemaContainerRootImpl.class, response, HIGH); ======= checkOut(project, HAS_RELEASE_ROOT, ReleaseRootImpl.class, result, HIGH); checkOut(project, HAS_NODE_ROOT, NodeRootImpl.class, result, HIGH); checkOut(project, HAS_TAGFAMILY_ROOT, TagFamilyRootImpl.class, result, HIGH); checkOut(project, HAS_ROOT_NODE, NodeImpl.class, result, HIGH); checkOut(project, HAS_SCHEMA_ROOT, ProjectSchemaContainerRootImpl.class, result, HIGH); checkOut(project, HAS_MICROSCHEMA_ROOT, ProjectMicroschemaContainerRootImpl.class, result, HIGH); >>>>>>> checkOut(project, HAS_BRANCH_ROOT, BranchRootImpl.class, result, HIGH); checkOut(project, HAS_NODE_ROOT, NodeRootImpl.class, result, HIGH); checkOut(project, HAS_TAGFAMILY_ROOT, TagFamilyRootImpl.class, result, HIGH); checkOut(project, HAS_ROOT_NODE, NodeImpl.class, result, HIGH); checkOut(project, HAS_SCHEMA_ROOT, ProjectSchemaContainerRootImpl.class, result, HIGH); checkOut(project, HAS_MICROSCHEMA_ROOT, ProjectMicroschemaContainerRootImpl.class, result, HIGH);
<<<<<<< import com.gentics.mesh.core.data.NodeGraphFieldContainer; import com.gentics.mesh.core.data.Release; ======= import com.gentics.mesh.core.data.NodeGraphFieldContainer; >>>>>>> import com.gentics.mesh.core.data.NodeGraphFieldContainer; import com.gentics.mesh.core.data.Release; <<<<<<< public static ReleaseAssert assertThat(Release actual) { return new ReleaseAssert(actual); } public static ReleaseResponseAssert assertThat(ReleaseResponse actual) { return new ReleaseResponseAssert(actual); } public static NodeGraphFieldContainerAssert assertThat(NodeGraphFieldContainer actual) { return new NodeGraphFieldContainerAssert(actual); } ======= public static NodeGraphFieldContainerAssert assertThat(NodeGraphFieldContainer actual) { return new NodeGraphFieldContainerAssert(actual); } public static FieldMapAssert assertThat(FieldMap actual) { return new FieldMapAssert(actual); } >>>>>>> public static ReleaseAssert assertThat(Release actual) { return new ReleaseAssert(actual); } public static ReleaseResponseAssert assertThat(ReleaseResponse actual) { return new ReleaseResponseAssert(actual); } public static NodeGraphFieldContainerAssert assertThat(NodeGraphFieldContainer actual) { return new NodeGraphFieldContainerAssert(actual); } public static FieldMapAssert assertThat(FieldMap actual) { return new FieldMapAssert(actual); }
<<<<<<< ======= import com.gentics.mesh.core.rest.user.NodeReference; import com.gentics.mesh.graphdb.Tx; >>>>>>> import com.gentics.mesh.core.rest.user.NodeReference;
<<<<<<< import com.gentics.mesh.core.data.impl.GraphFieldContainerEdgeImpl; ======= import com.gentics.mesh.core.data.generic.MeshVertexImpl; >>>>>>> import com.gentics.mesh.core.data.generic.MeshVertexImpl; import com.gentics.mesh.core.data.impl.GraphFieldContainerEdgeImpl;
<<<<<<< import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.core.rest.common.ContainerType; import com.gentics.mesh.core.rest.event.role.TagPermissionChangedEventModel; import com.gentics.mesh.core.rest.event.tag.TagMeshEventModel; import com.gentics.mesh.core.rest.project.ProjectReference; ======= import com.gentics.mesh.core.data.relationship.GraphPermission; import com.gentics.mesh.core.data.search.SearchQueueBatch; import com.gentics.mesh.core.data.search.context.impl.GenericEntryContextImpl; >>>>>>> import com.gentics.mesh.core.data.relationship.GraphPermission; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.core.rest.common.ContainerType; import com.gentics.mesh.core.rest.event.role.TagPermissionChangedEventModel; import com.gentics.mesh.core.rest.event.tag.TagMeshEventModel; import com.gentics.mesh.core.rest.project.ProjectReference;
<<<<<<< import java.awt.image.BufferedImage; import org.assertj.core.api.Assertions; import com.gentics.mesh.assertj.impl.BranchAssert; import com.gentics.mesh.assertj.impl.BranchResponseAssert; ======= import com.gentics.mesh.assertj.impl.BranchAssert; import com.gentics.mesh.assertj.impl.BranchResponseAssert; >>>>>>> import java.awt.image.BufferedImage; import org.assertj.core.api.Assertions; import com.gentics.mesh.assertj.impl.BranchAssert; import com.gentics.mesh.assertj.impl.BranchResponseAssert; import com.gentics.mesh.rest.client.MeshWebrootResponse; <<<<<<< import com.gentics.mesh.assertj.impl.MeshElementEventModelAssert; import com.gentics.mesh.assertj.impl.MeshEventModelAssert; ======= import com.gentics.mesh.assertj.impl.MeshRestClientMessageExceptionAssert; >>>>>>> import com.gentics.mesh.assertj.impl.MeshElementEventModelAssert; import com.gentics.mesh.assertj.impl.MeshEventModelAssert; import com.gentics.mesh.assertj.impl.MeshRestClientMessageExceptionAssert; <<<<<<< import com.gentics.mesh.rest.client.MeshWebrootResponse; ======= import com.gentics.mesh.rest.client.MeshRestClientMessageException; import com.gentics.mesh.rest.client.MeshWebrootResponse; >>>>>>> import com.gentics.mesh.rest.client.MeshRestClientMessageException;
<<<<<<< ======= import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PERM; >>>>>>> <<<<<<< ======= import java.util.Collections; import java.util.HashSet; import java.util.Set; >>>>>>> <<<<<<< ======= import com.gentics.mesh.madl.traversal.TraversalResult; import com.gentics.mesh.parameter.GenericParameters; import com.gentics.mesh.parameter.PagingParameters; import com.gentics.mesh.parameter.value.FieldsSet; import com.syncleus.ferma.traversals.VertexTraversal; >>>>>>>
<<<<<<< ======= import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; import com.gentics.mesh.cli.PostProcessFlags; >>>>>>> import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; import com.gentics.mesh.cli.PostProcessFlags; <<<<<<< void apply(MeshRoot meshRoot); ======= public void apply(PostProcessFlags flags, MeshRoot meshRoot) { List<HighLevelChange> changes = highLevelChangesList.getList(); for (HighLevelChange change : changes) { db.tx(tx2 -> { if (!isApplied(meshRoot, change)) { try { long start = System.currentTimeMillis(); db.tx(tx -> { if (log.isDebugEnabled()) { log.debug("Executing change {" + change.getName() + "}/{" + change.getUuid() + "}"); } change.apply(); tx.success(); }); change.applyNoTx(); long duration = System.currentTimeMillis() - start; db.tx(tx -> { meshRoot.getChangelogRoot().add(change, duration); tx.success(); }); if (change.requiresReindex()) { flags.requireReindex(); } } catch (Exception e) { log.error("Error while executing change {" + change.getName() + "}/{" + change.getUuid() + "}", e); throw new RuntimeException("Error while executing high level changelog."); } } }); } } >>>>>>> void apply(PostProcessFlags flags, MeshRoot meshRoot);
<<<<<<< public <T extends VertexFrame> Result<T> getVerticesTraversal(Class<T> classOfVertex, String[] fieldNames, Object[] fieldValues) { ======= public Iterable<Vertex> getVerticesForRange(Class<?> classOfVertex, String indexPostfix, String[] fieldNames, Object[] fieldValues, String rangeKey, long start, long end) { OrientBaseGraph orientBaseGraph = unwrapCurrentGraph(); OrientVertexType elementType = orientBaseGraph.getVertexType(classOfVertex.getSimpleName()); String indexName = classOfVertex.getSimpleName() + "_" + indexPostfix; OIndex<?> index = elementType.getClassIndex(indexName); Object startKey = index().createComposedIndexKey(fieldValues[0], start); Object endKey = index().createComposedIndexKey(fieldValues[0], end); OIndexCursor entries = index.getInternal().iterateEntriesBetween(startKey, true, endKey, true, false); return () -> entries.toEntries().stream().map( entry -> { Vertex vertex = new OrientVertex(orientBaseGraph, entry.getValue()); return vertex; }).iterator(); } @Override public <T extends VertexFrame> TraversalResult<T> getVerticesTraversal(Class<T> classOfVertex, String[] fieldNames, Object[] fieldValues) { >>>>>>> public Iterable<Vertex> getVerticesForRange(Class<?> classOfVertex, String indexPostfix, String[] fieldNames, Object[] fieldValues, String rangeKey, long start, long end) { OrientBaseGraph orientBaseGraph = unwrapCurrentGraph(); OrientVertexType elementType = orientBaseGraph.getVertexType(classOfVertex.getSimpleName()); String indexName = classOfVertex.getSimpleName() + "_" + indexPostfix; OIndex<?> index = elementType.getClassIndex(indexName); Object startKey = index().createComposedIndexKey(fieldValues[0], start); Object endKey = index().createComposedIndexKey(fieldValues[0], end); OIndexCursor entries = index.getInternal().iterateEntriesBetween(startKey, true, endKey, true, false); return () -> entries.toEntries().stream().map( entry -> { Vertex vertex = new OrientVertex(orientBaseGraph, entry.getValue()); return vertex; }).iterator(); } @Override public <T extends VertexFrame> Result<T> getVerticesTraversal(Class<T> classOfVertex, String[] fieldNames, Object[] fieldValues) {
<<<<<<< ======= import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.handler.VersionHandler.CURRENT_API_BASE_PATH; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.TestDataProvider.PROJECT_NAME; import static com.gentics.mesh.test.TestSize.PROJECT; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; >>>>>>> import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.handler.VersionHandler.CURRENT_API_BASE_PATH; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.TestDataProvider.PROJECT_NAME; import static com.gentics.mesh.test.TestSize.PROJECT; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.junit.Before; import org.junit.Ignore; import org.junit.Test;
<<<<<<< import com.gentics.mesh.core.data.impl.MicroschemaContainerImpl; ======= import com.gentics.mesh.core.data.container.impl.MicroschemaContainerImpl; import com.gentics.mesh.core.data.container.impl.SchemaContainerImpl; >>>>>>> import com.gentics.mesh.core.data.container.impl.MicroschemaContainerImpl;
<<<<<<< import com.gentics.mesh.graphdb.Trx; ======= import com.gentics.mesh.core.rest.schema.Schema; import com.gentics.mesh.core.rest.schema.impl.ListFieldSchemaImpl; >>>>>>> import com.gentics.mesh.core.rest.schema.Schema; import com.gentics.mesh.core.rest.schema.impl.ListFieldSchemaImpl; import com.gentics.mesh.graphdb.Trx; <<<<<<< public void testAddContent() throws InterruptedException { ======= public void testAddContent() throws InterruptedException, IOException { SearchQueue searchQueue = boot.meshRoot().getSearchQueue(); Node node = folder("2015"); GraphStringFieldList list = node.getFieldContainer(english()).createStringList("stringList"); list.createString("one"); list.createString("two"); list.createString("three"); list.createString("four"); Schema schema = node.getSchemaContainer().getSchema(); schema.addField(new ListFieldSchemaImpl().setListType("string").setName("stringList")); node.getSchemaContainer().setSchema(schema); >>>>>>> public void testAddContent() throws InterruptedException, IOException { try (Trx tx = new Trx(db)) { SearchQueue searchQueue = boot.meshRoot().getSearchQueue(); Node node = folder("2015"); GraphStringFieldList list = node.getFieldContainer(english()).createStringList("stringList"); list.createString("one"); list.createString("two"); list.createString("three"); list.createString("four"); Schema schema = node.getSchemaContainer().getSchema(); schema.addField(new ListFieldSchemaImpl().setListType("string").setName("stringList")); node.getSchemaContainer().setSchema(schema); tx.success(); } <<<<<<< QueryBuilder qb = QueryBuilders.queryStringQuery("2015"); String json = "{"; json += " \"query\":"+ qb.toString(); json += " }"; log.debug("Query: " + json); Future<NodeListResponse> future = getClient().searchNodes(json, new PagingInfo().setPage(1).setPerPage(2)); ======= String json = "{"; json += " \"sort\" : {"; json += " \"created\" : {\"order\" : \"asc\"}"; json += " },"; json += " \"query\":{"; json += " \"bool\" : {"; json += " \"must\" : {"; json += " \"term\" : { \"fields.stringList\" : \"three\" }"; json += " }"; json += " }"; json += " }"; json += " }"; Future<NodeListResponse> future = getClient().searchNodes(json, new PagingInfo().setPage(1).setPerPage(2)); >>>>>>> String json = "{"; json += " \"sort\" : {"; json += " \"created\" : {\"order\" : \"asc\"}"; json += " },"; json += " \"query\":{"; json += " \"bool\" : {"; json += " \"must\" : {"; json += " \"term\" : { \"fields.stringList\" : \"three\" }"; json += " }"; json += " }"; json += " }"; json += " }"; Future<NodeListResponse> future = getClient().searchNodes(json, new PagingInfo().setPage(1).setPerPage(2));
<<<<<<< return new MeshHttpRequestImpl<>(request, handler, null, null, authentication, "*/*"); } @Override public MeshRequest<NodeResponse> webrootUpdate(String projectName, String path, NodeUpdateRequest nodeUpdateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(path, "path must not be null"); if (!path.startsWith("/")) { throw new RuntimeException("The path {" + path + "} must start with a slash"); } return webrootUpdate(projectName, path.split("/"), nodeUpdateRequest, parameters); } @Override public MeshRequest<NodeResponse> webrootUpdate(String projectName, String[] pathSegments, NodeUpdateRequest nodeUpdateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(pathSegments, "pathSegments must not be null"); Objects.requireNonNull(nodeUpdateRequest, "nodeUpdateRequest must not be null"); String path = Arrays.stream(pathSegments) .filter(segment -> segment != null && !segment.isEmpty()) .map(URIUtils::encodeSegment) .collect(Collectors.joining("/", "/", "")); String requestUri = "/" + encodeSegment(projectName) + "/webroot" + path + getQuery(parameters); return prepareRequest(POST, requestUri, NodeResponse.class, nodeUpdateRequest); } @Override public MeshRequest<NodeResponse> webrootCreate(String projectName, String path, NodeCreateRequest nodeCreateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(path, "path must not be null"); if (!path.startsWith("/")) { throw new RuntimeException("The path {" + path + "} must start with a slash"); } return webrootCreate(projectName, path.split("/"), nodeCreateRequest , parameters); } @Override public MeshRequest<NodeResponse> webrootCreate(String projectName, String[] pathSegments, NodeCreateRequest nodeCreateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(pathSegments, "pathSegments must not be null"); Objects.requireNonNull(nodeCreateRequest, "nodeCreateRequest must not be null"); String path = Arrays.stream(pathSegments) .filter(segment -> segment != null && !segment.isEmpty()) .map(URIUtils::encodeSegment) .collect(Collectors.joining("/", "/", "")); String requestUri = "/" + encodeSegment(projectName) + "/webroot" + path + getQuery(parameters); return prepareRequest(POST, requestUri, NodeResponse.class, nodeCreateRequest); ======= return new MeshHttpRequestImpl<>(request, handler, null, null, authentication, "*/*"); >>>>>>> return new MeshHttpRequestImpl<>(request, handler, null, null, authentication, "*/*"); } @Override public MeshRequest<NodeResponse> webrootUpdate(String projectName, String path, NodeUpdateRequest nodeUpdateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(path, "path must not be null"); if (!path.startsWith("/")) { throw new RuntimeException("The path {" + path + "} must start with a slash"); } return webrootUpdate(projectName, path.split("/"), nodeUpdateRequest, parameters); } @Override public MeshRequest<NodeResponse> webrootUpdate(String projectName, String[] pathSegments, NodeUpdateRequest nodeUpdateRequest, ParameterProvider... 
parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(pathSegments, "pathSegments must not be null"); Objects.requireNonNull(nodeUpdateRequest, "nodeUpdateRequest must not be null"); String path = Arrays.stream(pathSegments) .filter(segment -> segment != null && !segment.isEmpty()) .map(URIUtils::encodeSegment) .collect(Collectors.joining("/", "/", "")); String requestUri = "/" + encodeSegment(projectName) + "/webroot" + path + getQuery(parameters); return prepareRequest(POST, requestUri, NodeResponse.class, nodeUpdateRequest); } @Override public MeshRequest<NodeResponse> webrootCreate(String projectName, String path, NodeCreateRequest nodeCreateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(path, "path must not be null"); if (!path.startsWith("/")) { throw new RuntimeException("The path {" + path + "} must start with a slash"); } return webrootCreate(projectName, path.split("/"), nodeCreateRequest, parameters); } @Override public MeshRequest<NodeResponse> webrootCreate(String projectName, String[] pathSegments, NodeCreateRequest nodeCreateRequest, ParameterProvider... parameters) { Objects.requireNonNull(projectName, "projectName must not be null"); Objects.requireNonNull(pathSegments, "pathSegments must not be null"); Objects.requireNonNull(nodeCreateRequest, "nodeCreateRequest must not be null"); String path = Arrays.stream(pathSegments) .filter(segment -> segment != null && !segment.isEmpty()) .map(URIUtils::encodeSegment) .collect(Collectors.joining("/", "/", "")); String requestUri = "/" + encodeSegment(projectName) + "/webroot" + path + getQuery(parameters); return prepareRequest(POST, requestUri, NodeResponse.class, nodeCreateRequest);
<<<<<<< import com.gentics.mesh.auth.MeshJWTAuthProvider; ======= import com.gentics.mesh.auth.MeshBasicAuthHandler; >>>>>>> import com.gentics.mesh.auth.MeshJWTAuthProvider; import com.gentics.mesh.auth.MeshBasicAuthHandler; <<<<<<< switch (Mesh.mesh().getOptions().getAuthenticationOptions().getAuthenticationMethod()) { case JWT: return JWTAuthHandler.create(authProvider()); case BASIC_AUTH: default: return BasicAuthHandler.create(authProvider(), BasicAuthHandler.DEFAULT_REALM); } ======= return new MeshBasicAuthHandler(authProvider()); >>>>>>> switch (Mesh.mesh().getOptions().getAuthenticationOptions().getAuthenticationMethod()) { case JWT: return JWTAuthHandler.create(authProvider()); case BASIC_AUTH: default: return new MeshBasicAuthHandler(authProvider()); }
<<<<<<< public MeshContainer addSlave(String string, String name, String name2, boolean b) { return addSlave(string, name, name2, b, -1); ======= public MeshDockerServer addSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders) { return addSlave(clusterName, nodeName, dataPathPostfix, clearFolders, -1); >>>>>>> public MeshContainer addSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders) { return addSlave(clusterName, nodeName, dataPathPostfix, clearFolders, -1); <<<<<<< protected MeshContainer addSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders, int writeQuorum) { MeshContainer server = new MeshContainer(MeshContainer.LOCAL_PROVIDER) ======= protected MeshDockerServer addSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders, int writeQuorum) { MeshDockerServer server = prepareSlave(clusterName, nodeName, dataPathPostfix, clearFolders, writeQuorum); server.start(); return server; } protected MeshDockerServer prepareSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders, int writeQuorum) { MeshDockerServer server = new MeshDockerServer(vertx) >>>>>>> protected MeshContainer addSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders, int writeQuorum) { MeshContainer server = prepareSlave(clusterName, nodeName, dataPathPostfix, clearFolders, writeQuorum); server.start(); return server; } protected MeshContainer prepareSlave(String clusterName, String nodeName, String dataPathPostfix, boolean clearFolders, int writeQuorum) { MeshContainer server = new MeshContainer(MeshContainer.LOCAL_PROVIDER)
<<<<<<< import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.core.data.relationship.GraphPermission.CREATE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.DELETE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.UPDATE_PERM; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_CREATED; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_DELETED; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_UPDATED; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.ClientHelper.validateDeletion; import static com.gentics.mesh.test.TestSize.PROJECT; import static com.gentics.mesh.test.context.ElasticsearchTestMode.TRACKING; import static com.gentics.mesh.test.context.MeshTestHelper.awaitConcurrentRequests; import static com.gentics.mesh.test.util.MeshAssert.assertElement; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.CONFLICT; import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN; import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import org.junit.Ignore; import org.junit.Test; ======= >>>>>>> import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.core.data.relationship.GraphPermission.CREATE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.DELETE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.UPDATE_PERM; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_CREATED; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_DELETED; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_UPDATED; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.ClientHelper.validateDeletion; import static com.gentics.mesh.test.TestSize.PROJECT; import static com.gentics.mesh.test.context.ElasticsearchTestMode.TRACKING; import static com.gentics.mesh.test.context.MeshTestHelper.awaitConcurrentRequests; import static com.gentics.mesh.test.util.MeshAssert.assertElement; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.CONFLICT; import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN; import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import org.junit.Ignore; import org.junit.Test; <<<<<<< ======= import com.gentics.mesh.core.data.Group; >>>>>>> import com.syncleus.ferma.tx.Tx; <<<<<<< import com.syncleus.ferma.tx.Tx; ======= import com.syncleus.ferma.tx.Tx; >>>>>>> <<<<<<< @MeshTestSetting(elasticsearch = TRACKING, testSize = PROJECT, 
startServer = true) ======= import org.junit.Ignore; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.core.data.relationship.GraphPermission.CREATE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.DELETE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.UPDATE_PERM; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.ClientHelper.validateDeletion; import static com.gentics.mesh.test.TestSize.PROJECT; import static com.gentics.mesh.test.context.MeshTestHelper.awaitConcurrentRequests; import static com.gentics.mesh.test.util.MeshAssert.assertElement; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.CONFLICT; import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN; import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @MeshTestSetting(useElasticsearch = false, testSize = PROJECT, startServer = true) >>>>>>> @MeshTestSetting(elasticsearch = TRACKING, testSize = PROJECT, startServer = true)
<<<<<<< ======= import static com.gentics.mesh.core.rest.admin.migration.MigrationStatus.COMPLETED; import static com.gentics.mesh.core.rest.admin.migration.MigrationStatus.RUNNING; import static com.gentics.mesh.core.rest.common.ContainerType.DRAFT; import static com.gentics.mesh.core.rest.common.ContainerType.PUBLISHED; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.inject.Inject; import javax.inject.Singleton; import com.gentics.mesh.context.MicronodeMigrationContext; >>>>>>> import com.gentics.mesh.context.MicronodeMigrationContext; <<<<<<< import com.syncleus.ferma.tx.Tx; ======= >>>>>>> <<<<<<< public Completable migrateMicronodes(Branch branch, MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, @NotNull MigrationStatusHandler status) { // Collect the migration scripts NodeMigrationActionContextImpl ac = new NodeMigrationActionContextImpl(); List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts = new ArrayList<>(); Set<String> touchedFields = new HashSet<>(); try (Tx tx = db.tx()) { prepareMigration(fromVersion, touchedFields); ac.setProject(branch.getProject()); ac.setBranch(branch); if (status != null) { status.setStatus(RUNNING); status.commit(); } tx.success(); } catch (IOException e) { return Completable.error(e); } ======= public Completable migrateMicronodes(MicronodeMigrationContext context) { context.validate(); return Completable.defer(() -> { Branch branch = context.getBranch(); MicroschemaContainerVersion fromVersion = context.getFromVersion(); MicroschemaContainerVersion toVersion = context.getToVersion(); MigrationStatusHandler status = context.getStatus(); MicroschemaMigrationCause cause = context.getCause(); // Collect the migration scripts NodeMigrationActionContextImpl ac = new NodeMigrationActionContextImpl(); List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts = new ArrayList<>(); Set<String> touchedFields = new HashSet<>(); try { db.tx(() -> { prepareMigration(fromVersion, migrationScripts, touchedFields); >>>>>>> public Completable migrateMicronodes(MicronodeMigrationContext context) { context.validate(); return Completable.defer(() -> { Branch branch = context.getBranch(); MicroschemaContainerVersion fromVersion = context.getFromVersion(); MicroschemaContainerVersion toVersion = context.getToVersion(); MigrationStatusHandler status = context.getStatus(); MicroschemaMigrationCause cause = context.getCause(); // Collect the migration scripts NodeMigrationActionContextImpl ac = new NodeMigrationActionContextImpl(); List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts = new ArrayList<>(); Set<String> touchedFields = new HashSet<>(); try { db.tx(() -> { prepareMigration(fromVersion, touchedFields); <<<<<<< List<Exception> errorsDetected = migrateLoop(fieldContainersResult, status, (batch, container, errors) -> migrateMicronodeContainer(ac, batch, branch, fromVersion, toVersion, container, touchedFields, errors) ); ======= // Get the containers, that need to be transformed List<? extends NodeGraphFieldContainer> fieldContainersResult = db.tx(() -> { return fromVersion.getDraftFieldContainers(branch.getUuid()).list(); }); >>>>>>> // Get the containers, that need to be transformed List<? 
extends NodeGraphFieldContainer> fieldContainersResult = db.tx(() -> { return fromVersion.getDraftFieldContainers(branch.getUuid()).list(); }); <<<<<<< private void migrateMicronodeContainer(NodeMigrationActionContextImpl ac, SearchQueueBatch batch, Branch branch, MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, NodeGraphFieldContainer container, Set<String> touchedFields, List<Exception> errorsDetected) { ======= private void migrateMicronodeContainer(NodeMigrationActionContextImpl ac, EventQueueBatch batch, Branch branch, MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, NodeGraphFieldContainer container, Set<String> touchedFields, List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts, List<Exception> errorsDetected) { >>>>>>> private void migrateMicronodeContainer(NodeMigrationActionContextImpl ac, EventQueueBatch batch, Branch branch, MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, NodeGraphFieldContainer container, Set<String> touchedFields, List<Exception> errorsDetected) { <<<<<<< migrateMicronodeFields(ac, migrated, fromVersion, toVersion, touchedFields); sqb.store(migrated, branchUuid, PUBLISHED, false); ======= migrateMicronodeFields(ac, migrated, fromVersion, toVersion, touchedFields, migrationScripts); sqb.add(migrated.onUpdated(branchUuid, PUBLISHED)); >>>>>>> migrateMicronodeFields(ac, migrated, fromVersion, toVersion, touchedFields); sqb.add(migrated.onUpdated(branchUuid, PUBLISHED));
<<<<<<< import com.gentics.mesh.core.data.schema.SchemaFieldChange; import com.gentics.mesh.core.data.schema.impl.FieldTypeChangeImpl; ======= import com.gentics.mesh.core.data.schema.impl.AddFieldChangeImpl; import com.gentics.mesh.core.data.schema.impl.FieldTypeChangeImpl; >>>>>>> import com.gentics.mesh.core.data.schema.SchemaFieldChange; import com.gentics.mesh.core.data.schema.impl.AddFieldChangeImpl; import com.gentics.mesh.core.data.schema.impl.FieldTypeChangeImpl;
<<<<<<< import com.gentics.mesh.core.rest.MeshEvent; ======= >>>>>>>
<<<<<<< ======= import static com.gentics.mesh.core.data.ContainerType.DRAFT; import static com.gentics.mesh.core.data.ContainerType.INITIAL; import static com.gentics.mesh.core.data.ContainerType.PUBLISHED; import java.util.List; import java.util.Objects; import java.util.Set; import com.gentics.mesh.context.BulkActionContext; >>>>>>> import static com.gentics.mesh.core.data.ContainerType.DRAFT; import static com.gentics.mesh.core.data.ContainerType.INITIAL; import static com.gentics.mesh.core.data.ContainerType.PUBLISHED; import java.util.List; import java.util.Objects; import java.util.Set; import com.gentics.mesh.context.BulkActionContext; <<<<<<< * @param branchUuid * branch Uuid ======= * @param ac * @param releaseUuid * release Uuid >>>>>>> * @param ac * @param branchUuid * branch Uuid <<<<<<< void updateWebrootPathInfo(String branchUuid, String conflictI18n); ======= void updateWebrootPathInfo(InternalActionContext ac, String releaseUuid, String conflictI18n); /** * Update the property webroot path info. This will also check for uniqueness conflicts of the webroot path and will throw a * {@link Errors#conflict(String, String, String, String...)} if one is found. * * @param releaseUuid * @param conflictI18n */ default void updateWebrootPathInfo(String releaseUuid, String conflictI18n) { updateWebrootPathInfo(null, releaseUuid, conflictI18n); } >>>>>>> void updateWebrootPathInfo(InternalActionContext ac, String branchUuid, String conflictI18n); /** * Update the property webroot path info. This will also check for uniqueness conflicts of the webroot path and will throw a * {@link Errors#conflict(String, String, String, String...)} if one is found. * * @param branchUuid * @param conflictI18n */ default void updateWebrootPathInfo(String branchUuid, String conflictI18n) { updateWebrootPathInfo(null, branchUuid, conflictI18n); } <<<<<<< * Check whether this field container is the initial version for any * branch. ======= * Check whether this field container is the initial version for any release. >>>>>>> * Check whether this field container is the initial version for any * branch. <<<<<<< * Check whether this field container is the initial version for the given * branch. ======= * Check whether this field container is the initial version for the given release. >>>>>>> * Check whether this field container is the initial version for the given * branch.
<<<<<<< @Override public Single<Boolean> isAvailable() { try { return client.clusterHealth().async() .timeout(1, TimeUnit.SECONDS) .map(ignore -> true) .onErrorReturnItem(false); } catch (HttpErrorException e) { return Single.just(false); } } ======= @Override public boolean isActive() { return client != null; } >>>>>>> @Override public Single<Boolean> isAvailable() { try { return client.clusterHealth().async() .timeout(1, TimeUnit.SECONDS) .map(ignore -> true) .onErrorReturnItem(false); } catch (HttpErrorException e) { return Single.just(false); } } @Override public boolean isActive() { return client != null; }
<<<<<<< import com.gentics.mesh.util.UUIDUtil; import static com.gentics.mesh.test.TestSize.FULL; ======= >>>>>>> import com.gentics.mesh.util.UUIDUtil; import static com.gentics.mesh.test.TestSize.FULL; <<<<<<< call(() -> client().updateRelease(projectName, "bogus", request), BAD_REQUEST, "error_illegal_uuid", "bogus"); ======= call(() -> client().updateRelease(PROJECT_NAME, "bogus", request), NOT_FOUND, "object_not_found_for_uuid", "bogus"); >>>>>>> call(() -> client().updateRelease(PROJECT_NAME, "bogus", request), NOT_FOUND, "object_not_found_for_uuid", "bogus");
<<<<<<< ======= import static com.gentics.mesh.core.rest.common.ContainerType.DRAFT; import static com.gentics.mesh.core.rest.common.ContainerType.PUBLISHED; import static com.gentics.mesh.core.rest.job.JobStatus.COMPLETED; import static com.gentics.mesh.core.rest.job.JobStatus.RUNNING; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.inject.Inject; import javax.inject.Singleton; >>>>>>> import static com.gentics.mesh.core.rest.common.ContainerType.DRAFT; import static com.gentics.mesh.core.rest.common.ContainerType.PUBLISHED; import static com.gentics.mesh.core.rest.job.JobStatus.COMPLETED; import static com.gentics.mesh.core.rest.job.JobStatus.RUNNING; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.inject.Inject; import javax.inject.Singleton; <<<<<<< MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, NodeGraphFieldContainer container, Set<String> touchedFields, List<Exception> errorsDetected) { ======= MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, NodeGraphFieldContainer container, Set<String> touchedFields, List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts, List<Exception> errorsDetected) { String containerUuid = container.getUuid(); >>>>>>> MicroschemaContainerVersion fromVersion, MicroschemaContainerVersion toVersion, NodeGraphFieldContainer container, Set<String> touchedFields, List<Exception> errorsDetected) { String containerUuid = container.getUuid(); <<<<<<< migrateDraftContainer(ac, batch, branch, node, container, fromVersion, toVersion, touchedFields, nextDraftVersion); ======= migrateDraftContainer(ac, batch, branch, node, container, fromVersion, toVersion, touchedFields, migrationScripts, nextDraftVersion); postMigrationPurge(container, oldPublished); >>>>>>> migrateDraftContainer(ac, batch, branch, node, container, fromVersion, toVersion, touchedFields, nextDraftVersion); postMigrationPurge(container, oldPublished);
<<<<<<< ======= import com.google.api.services.youtube.model.Playlist; import com.google.api.services.youtube.model.SearchResult; import com.google.api.services.youtube.model.Video; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; >>>>>>>
<<<<<<< ======= import static com.gentics.mesh.core.rest.admin.migration.MigrationStatus.COMPLETED; import static com.gentics.mesh.core.rest.admin.migration.MigrationStatus.RUNNING; import static com.gentics.mesh.core.rest.common.ContainerType.DRAFT; import static com.gentics.mesh.core.rest.common.ContainerType.PUBLISHED; import static com.gentics.mesh.metric.Metrics.NODE_MIGRATION_PENDING; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.inject.Inject; import javax.inject.Singleton; >>>>>>> <<<<<<< import com.syncleus.ferma.tx.Tx; ======= >>>>>>> <<<<<<< public Completable migrateNodes(NodeMigrationActionContextImpl ac, Project project, Branch branch, SchemaContainerVersion fromVersion, SchemaContainerVersion toVersion, @NotNull MigrationStatusHandler status) { // Prepare the migration - Collect the migration scripts Set<String> touchedFields = new HashSet<>(); SchemaModel newSchema = db.tx(() -> toVersion.getSchema()); try (Tx tx = db.tx()) { prepareMigration(fromVersion, touchedFields); ac.setProject(project); ac.setBranch(branch); if (status != null) { status.setStatus(RUNNING); status.commit(); } tx.success(); } catch (IOException e) { log.error("Error while preparing migration"); return Completable.error(e); } // Get the draft containers that need to be transformed. Containers which need to be transformed are those which are still linked to older schema // versions. We'll work on drafts. The migration code will later on also handle publish versions. List<? extends NodeGraphFieldContainer> containers = db.tx(() -> { Iterator<? extends NodeGraphFieldContainer> it = fromVersion.getDraftFieldContainers(branch.getUuid()); return Lists.newArrayList(it); }); if (metrics.isEnabled()) { migrationCounter.reset(); migrationCounter.inc(containers.size()); } // No field containers, migration is done if (containers.isEmpty()) { if (status != null) { ======= public Completable migrateNodes(NodeMigrationActionContextImpl context) { context.validate(); return Completable.defer(() -> { SchemaContainerVersion fromVersion = context.getFromVersion(); SchemaContainerVersion toVersion = context.getToVersion(); SchemaMigrationCause cause = context.getCause(); Branch branch = context.getBranch(); MigrationStatusHandler status = context.getStatus(); // Prepare the migration - Collect the migration scripts List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts = new ArrayList<>(); Set<String> touchedFields = new HashSet<>(); SchemaModel newSchema = db.tx(() -> toVersion.getSchema()); try { >>>>>>> public Completable migrateNodes(NodeMigrationActionContextImpl context) { context.validate(); return Completable.defer(() -> { SchemaContainerVersion fromVersion = context.getFromVersion(); SchemaContainerVersion toVersion = context.getToVersion(); SchemaMigrationCause cause = context.getCause(); Branch branch = context.getBranch(); MigrationStatusHandler status = context.getStatus(); // Prepare the migration - Collect the migration scripts Set<String> touchedFields = new HashSet<>(); SchemaModel newSchema = db.tx(() -> toVersion.getSchema()); try { <<<<<<< List<Exception> errorsDetected = migrateLoop(containers, status, (batch, container, errors) -> { migrateContainer(ac, batch, container, fromVersion, toVersion, branch, newSchema, errors, touchedFields); ======= // Get the draft containers that need to be transformed. 
Containers which need to be transformed are those which are still linked to older schema // versions. We'll work on drafts. The migration code will later on also handle publish versions. List<? extends NodeGraphFieldContainer> containers = db.tx(() -> { Iterator<? extends NodeGraphFieldContainer> it = fromVersion.getDraftFieldContainers(branch.getUuid()); return Lists.newArrayList(it); }); >>>>>>> // Get the draft containers that need to be transformed. Containers which need to be transformed are those which are still linked to older schema // versions. We'll work on drafts. The migration code will later on also handle publish versions. List<? extends NodeGraphFieldContainer> containers = db.tx(() -> { Iterator<? extends NodeGraphFieldContainer> it = fromVersion.getDraftFieldContainers(branch.getUuid()); return Lists.newArrayList(it); }); <<<<<<< private void migrateContainer(NodeMigrationActionContextImpl ac, SearchQueueBatch batch, NodeGraphFieldContainer container, GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> fromVersion, SchemaContainerVersion toVersion, Branch branch, SchemaModel newSchema, List<Exception> errorsDetected, Set<String> touchedFields) { ======= private void migrateContainer(NodeMigrationActionContextImpl ac, EventQueueBatch batch, NodeGraphFieldContainer container, List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts, SchemaModel newSchema, List<Exception> errorsDetected, Set<String> touchedFields) { >>>>>>> private void migrateContainer(NodeMigrationActionContextImpl ac, EventQueueBatch batch, NodeGraphFieldContainer container, GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> fromVersion, SchemaModel newSchema, List<Exception> errorsDetected, Set<String> touchedFields) { <<<<<<< private void migrateDraftContainer(NodeMigrationActionContextImpl ac, SearchQueueBatch sqb, Branch branch, Node node, NodeGraphFieldContainer container, GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> fromVersion, SchemaContainerVersion toVersion, Set<String> touchedFields, SchemaModel newSchema, VersionNumber nextDraftVersion) ======= private void migrateDraftContainer(NodeMigrationActionContextImpl ac, EventQueueBatch sqb, Branch branch, Node node, NodeGraphFieldContainer container, SchemaContainerVersion toVersion, Set<String> touchedFields, List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts, SchemaModel newSchema, VersionNumber nextDraftVersion) >>>>>>> private void migrateDraftContainer(NodeMigrationActionContextImpl ac, EventQueueBatch sqb, Branch branch, Node node, NodeGraphFieldContainer container, GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> fromVersion, SchemaContainerVersion toVersion, Set<String> touchedFields, SchemaModel newSchema, VersionNumber nextDraftVersion) <<<<<<< private VersionNumber migratePublishedContainer(NodeMigrationActionContextImpl ac, SearchQueueBatch sqb, Branch branch, Node node, NodeGraphFieldContainer container, GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> fromVersion, SchemaContainerVersion toVersion, Set<String> touchedFields, SchemaModel newSchema) throws Exception { ======= private VersionNumber migratePublishedContainer(NodeMigrationActionContextImpl ac, EventQueueBatch sqb, Branch branch, Node node, NodeGraphFieldContainer container, SchemaContainerVersion toVersion, Set<String> touchedFields, List<Tuple<String, List<Tuple<String, Object>>>> migrationScripts, SchemaModel newSchema) throws Exception { >>>>>>> private VersionNumber migratePublishedContainer(NodeMigrationActionContextImpl ac, EventQueueBatch sqb, Branch 
branch, Node node, NodeGraphFieldContainer container, GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> fromVersion, SchemaContainerVersion toVersion, Set<String> touchedFields, SchemaModel newSchema) throws Exception { <<<<<<< migrate(ac, migrated, restModel, fromVersion, toVersion, touchedFields); sqb.store(migrated, branchUuid, PUBLISHED, false); ======= migrate(ac, migrated, restModel, toVersion, touchedFields, migrationScripts, NodeUpdateRequest.class); sqb.add(migrated.onUpdated(branchUuid, PUBLISHED)); >>>>>>> migrate(ac, migrated, restModel, fromVersion, toVersion, touchedFields); sqb.add(migrated.onUpdated(branchUuid, PUBLISHED));
<<<<<<< public String getPath(ActionContext ac, String branchUuid, ContainerType type, String... languageTag) { ======= public void postfixPathSegment(String releaseUuid, ContainerType type, String languageTag) { // Check whether this node is the base node. if (getParentNode(releaseUuid) == null) { return; } // Find the first matching container and fallback to other listed languages NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, releaseUuid, type); if (container != null) { container.postfixSegmentFieldValue(); } } @Override public String getPath(ActionContext ac, String releaseUuid, ContainerType type, String... languageTag) { >>>>>>> public void postfixPathSegment(String branchUuid, ContainerType type, String languageTag) { // Check whether this node is the base node. if (getParentNode(branchUuid) == null) { return; } // Find the first matching container and fallback to other listed languages NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branchUuid, type); if (container != null) { container.postfixSegmentFieldValue(); } } @Override public String getPath(ActionContext ac, String branchUuid, ContainerType type, String... languageTag) { <<<<<<< Branch branch = ac.getBranch(getProject()); restNode.setAvailableLanguages(getLanguageInfo(ac)); setFields(ac, branch, restNode, level, languageTags); setParentNodeInfo(ac, branch, restNode); setRolePermissions(ac, restNode); setChildrenInfo(ac, branch, restNode); setTagsToRest(ac, restNode, branch); fillCommonRestFields(ac, restNode); setBreadcrumbToRest(ac, restNode); setPathsToRest(ac, restNode, branch); setProjectReference(ac, restNode); ======= Release release = ac.getRelease(getProject()); if (fields.has("languages")) { restNode.setAvailableLanguages(getLanguageInfo(ac)); } if (fields.has("fields")) { setFields(ac, release, restNode, level, languageTags); } if (fields.has("parent")) { setParentNodeInfo(ac, release, restNode); } if (fields.has("perms")) { setRolePermissions(ac, restNode); } if (fields.has("children")) { setChildrenInfo(ac, release, restNode); } if (fields.has("tags")) { setTagsToRest(ac, restNode, release); } fillCommonRestFields(ac, fields, restNode); if (fields.has("breadcrumb")) { setBreadcrumbToRest(ac, restNode); } if (fields.has("path")) { setPathsToRest(ac, restNode, release); } if (fields.has("project")) { setProjectReference(ac, restNode); } >>>>>>> Branch branch = ac.getBranch(getProject()); if (fields.has("languages")) { restNode.setAvailableLanguages(getLanguageInfo(ac)); } if (fields.has("fields")) { setFields(ac, branch, restNode, level, languageTags); } if (fields.has("parent")) { setParentNodeInfo(ac, branch, restNode); } if (fields.has("perms")) { setRolePermissions(ac, restNode); } if (fields.has("children")) { setChildrenInfo(ac, branch, restNode); } if (fields.has("tags")) { setTagsToRest(ac, restNode, branch); } fillCommonRestFields(ac, fields, restNode); if (fields.has("breadcrumb")) { setBreadcrumbToRest(ac, restNode); } if (fields.has("path")) { setPathsToRest(ac, restNode, branch); } if (fields.has("project")) { setProjectReference(ac, restNode); } <<<<<<< public void publish(InternalActionContext ac, Branch branch, SearchQueueBatch batch) { String branchUuid = branch.getUuid(); ======= public void publish(InternalActionContext ac, Release release, BulkActionContext bac) { String releaseUuid = release.getUuid(); >>>>>>> public void publish(InternalActionContext ac, Branch branch, BulkActionContext bac) { String branchUuid = branch.getUuid(); <<<<<<< 
batch.store(this, branchUuid, ContainerType.PUBLISHED, false); ======= bac.batch().store(this, releaseUuid, ContainerType.PUBLISHED, false); >>>>>>> bac.batch().store(this, branchUuid, ContainerType.PUBLISHED, false); <<<<<<< child.publish(ac, branch, batch); ======= child.publish(ac, release, bac); >>>>>>> child.publish(ac, branch, bac); <<<<<<< public void publish(InternalActionContext ac, SearchQueueBatch batch) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); ======= public void publish(InternalActionContext ac, BulkActionContext bac) { Release release = ac.getRelease(getProject()); String releaseUuid = release.getUuid(); >>>>>>> public void publish(InternalActionContext ac, BulkActionContext bac) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); <<<<<<< unpublishedContainers.stream().forEach(c -> publish(c.getLanguage(), branch, ac.getUser())); ======= unpublishedContainers.stream().forEach(c -> publish(c.getLanguage(), release, ac.getUser())); bac.batch().store(this, releaseUuid, PUBLISHED, false); assertPublishConsistency(ac, release); >>>>>>> unpublishedContainers.stream().forEach(c -> publish(c.getLanguage(), branch, ac.getUser())); bac.batch().store(this, branchUuid, PUBLISHED, false); assertPublishConsistency(ac, branch); <<<<<<< List<? extends NodeGraphFieldContainer> publishedContainers = getGraphFieldContainers(branchUuid, PUBLISHED); getGraphFieldContainerEdges(branchUuid, PUBLISHED).stream().forEach(EdgeFrame::remove); ======= List<? extends NodeGraphFieldContainer> publishedContainers = getGraphFieldContainers(releaseUuid, PUBLISHED); getGraphFieldContainerEdges(releaseUuid, PUBLISHED).stream().forEach(EdgeFrame::remove); >>>>>>> List<? extends NodeGraphFieldContainer> publishedContainers = getGraphFieldContainers(branchUuid, PUBLISHED); getGraphFieldContainerEdges(branchUuid, PUBLISHED).stream().forEach(EdgeFrame::remove); <<<<<<< // Handle recursion if (parameters.isRecursive()) { for (Node node : getChildren()) { node.takeOffline(ac, batch, branch, parameters); } } assertPublishConsistency(ac, branch); ======= assertPublishConsistency(ac, release); >>>>>>> assertPublishConsistency(ac, branch); <<<<<<< batch.delete(container, branchUuid, PUBLISHED, false); ======= bac.batch().delete(container, releaseUuid, PUBLISHED, false); >>>>>>> bac.batch().delete(container, branchUuid, PUBLISHED, false); <<<<<<< takeOffline(ac, batch, branch, parameters); ======= takeOffline(ac, bac, release, parameters); >>>>>>> takeOffline(ac, bac, branch, parameters); <<<<<<< public void publish(InternalActionContext ac, SearchQueueBatch batch, String languageTag) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); ======= public void publish(InternalActionContext ac, BulkActionContext bac, String languageTag) { Release release = ac.getRelease(getProject()); String releaseUuid = release.getUuid(); >>>>>>> public void publish(InternalActionContext ac, BulkActionContext bac, String languageTag) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); <<<<<<< batch.delete(published, branchUuid, PUBLISHED, false); ======= bac.batch().delete(published, releaseUuid, PUBLISHED, false); bac.process(); >>>>>>> bac.batch().delete(published, branchUuid, PUBLISHED, false); bac.process(); <<<<<<< public void deleteFromBranch(InternalActionContext ac, Branch branch, SearchQueueBatch batch, boolean ignoreChecks) { ======= public void deleteFromRelease(InternalActionContext ac, Release 
release, BulkActionContext context, boolean ignoreChecks) { >>>>>>> public void deleteFromBranch(InternalActionContext ac, Branch branch, BulkActionContext context, boolean ignoreChecks) { <<<<<<< child.deleteFromBranch(ac, branch, batch, ignoreChecks); ======= child.deleteFromRelease(ac, release, context, ignoreChecks); >>>>>>> child.deleteFromBranch(ac, branch, context, ignoreChecks); <<<<<<< for (NodeGraphFieldContainer container : getGraphFieldContainers(branch, DRAFT)) { deleteLanguageContainer(ac, branch, container.getLanguage(), batch, false); ======= for (NodeGraphFieldContainer container : getGraphFieldContainers(release, DRAFT)) { deleteLanguageContainer(ac, release, container.getLanguage(), context, false); >>>>>>> for (NodeGraphFieldContainer container : getGraphFieldContainers(branch, DRAFT)) { deleteLanguageContainer(ac, branch, container.getLanguage(), context, false); <<<<<<< public void deleteLanguageContainer(InternalActionContext ac, Branch branch, Language language, SearchQueueBatch batch, ======= public void deleteLanguageContainer(InternalActionContext ac, Release release, Language language, BulkActionContext context, >>>>>>> public void deleteLanguageContainer(InternalActionContext ac, Branch branch, Language language, BulkActionContext context, <<<<<<< takeOffline(ac, batch, branch, language.getLanguageTag()); ======= takeOffline(ac, context, release, language.getLanguageTag()); >>>>>>> takeOffline(ac, context, branch, language.getLanguageTag()); <<<<<<< container.deleteFromBranch(branch, batch); ======= container.deleteFromRelease(release, context); >>>>>>> container.deleteFromBranch(branch, context); <<<<<<< initial.inE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, ContainerType.INITIAL.getCode()).removeAll(); ======= initial.inE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.RELEASE_UUID_KEY, release.getUuid()) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, ContainerType.INITIAL.getCode()).removeAll(); >>>>>>> initial.inE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, ContainerType.INITIAL.getCode()).removeAll(); <<<<<<< deleteFromBranch(ac, branch, batch, false); ======= deleteFromRelease(ac, release, context, false); >>>>>>> deleteFromBranch(ac, branch, context, false);
<<<<<<< import java.nio.charset.StandardCharsets; ======= import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; >>>>>>> import java.nio.charset.StandardCharsets; <<<<<<< /** * Loads a resource and converts to a POJO using {@link JsonUtil#readValue(String, Class)} * * @param path * @param clazz * @param <T> * @return */ default <T> T loadResourceJsonAsPojo(String path, Class<T> clazz) { try { return JsonUtil.readValue( Resources.toString( Resources.getResource(path), StandardCharsets.UTF_8), clazz); } catch (IOException e) { throw new RuntimeException(e); } } ======= default int threadCount() { ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); int i = 0; for (ThreadInfo threadInfo : threadMXBean.dumpAllThreads(true, true)) { i++; } return i; } >>>>>>> /** * Loads a resource and converts to a POJO using {@link JsonUtil#readValue(String, Class)} * * @param path * @param clazz * @param <T> * @return */ default <T> T loadResourceJsonAsPojo(String path, Class<T> clazz) { try { return JsonUtil.readValue( Resources.toString( Resources.getResource(path), StandardCharsets.UTF_8), clazz); } catch (IOException e) { throw new RuntimeException(e); } } default int threadCount() { ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); int i = 0; for (ThreadInfo threadInfo : threadMXBean.dumpAllThreads(true, true)) { i++; } return i; }
<<<<<<< import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.util.MeshAssert.assertDeleted; ======= import static com.gentics.mesh.core.data.search.SearchQueueEntryAction.DELETE_ACTION; import static com.gentics.mesh.util.MeshAssert.assertAffectedElements; >>>>>>> import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.core.data.search.SearchQueueEntryAction.DELETE_ACTION; import static com.gentics.mesh.util.MeshAssert.assertAffectedElements; <<<<<<< ======= import com.gentics.mesh.core.data.search.SearchQueueBatch; import com.gentics.mesh.core.data.search.SearchQueueEntry; import com.gentics.mesh.core.data.search.SearchQueueEntryAction; >>>>>>> import com.gentics.mesh.core.data.search.SearchQueueBatch;
<<<<<<< /** * Check the result object and fail early when the result failed as well. * * @param result * Result that will be checked * @return false when the result failed, otherwise true */ @Override public boolean failOnError(AsyncResult<?> result) { if (result.failed()) { fail(result.cause()); return false; } return true; } ======= @Override public void addCookie(Cookie cookie) { getRoutingContext().addCookie(cookie); } >>>>>>> /** * Check the result object and fail early when the result failed as well. * * @param result * Result that will be checked * @return false when the result failed, otherwise true */ @Override public boolean failOnError(AsyncResult<?> result) { if (result.failed()) { fail(result.cause()); return false; } return true; } @Override public void addCookie(Cookie cookie) { getRoutingContext().addCookie(cookie); }
<<<<<<< import com.gentics.mesh.core.data.user.HibUser; ======= >>>>>>> import com.gentics.mesh.core.data.user.HibUser; import com.gentics.mesh.core.data.root.UserRoot; <<<<<<< HibUser user = ac.getUser(); ======= TagDaoWrapper tagDao = Tx.get().data().tagDao(); User user = ac.getUser(); >>>>>>> HibUser user = ac.getUser(); TagDaoWrapper tagDao = Tx.get().data().tagDao();
<<<<<<< @MeshTestSetting(useElasticsearch = false, testSize = FULL, startServer = false) ======= import javax.script.ScriptException; import org.junit.Test; import com.gentics.mesh.core.field.string.StringFieldTestHelper; import com.gentics.mesh.test.context.MeshTestSetting; @MeshTestSetting(testSize = FULL, startServer = false) >>>>>>> @MeshTestSetting(testSize = FULL, startServer = false)
<<<<<<< default MicroschemaContainer create(Microschema microschema, User user) { return create(microschema, user, null); } /** * Create a new microschema container. * * @param microschema * @param user * User that is used to set creator and editor references. * @param uuid * optional uuid * @return */ MicroschemaContainer create(Microschema microschema, User user, String uuid); ======= MicroschemaContainer create(MicroschemaModel microschema, User user); >>>>>>> default MicroschemaContainer create(MicroschemaModel microschema, User user) { return create(microschema, user, null); } /** * Create a new microschema container. * * @param microschema * @param user * User that is used to set creator and editor references. * @param uuid * optional uuid * @return */ MicroschemaContainer create(MicroschemaModel microschema, User user, String uuid);
<<<<<<< httpRequest.getRequestURL(), resourceRequest.getStatus(), resourceRequest.getRejectionReason()); httpResponse.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, resourceRequest.getRejectionReason()); ======= new Object[] { httpRequest.getRequestURL(), resourceRequest.getStatus(), resourceRequest.getRejectionReason() }); httpResponse.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); >>>>>>> httpRequest.getRequestURL(), resourceRequest.getStatus(), resourceRequest.getRejectionReason()); httpResponse.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
<<<<<<< .frameExplicit(GraphFieldContainerEdgeImpl.class)) { String languageAndBranch = String.format("%s - %s", edge.getBranchUuid(), edge.getLanguageTag()); if (languageAndBranchSet.contains(languageAndBranch)) { response.addInconsistency(String.format("The node has more than one GFC of type %s, language %s for branch %s", type, edge.getLanguageTag(), edge.getBranchUuid()), uuid, HIGH); ======= .frameExplicit(GraphFieldContainerEdgeImpl.class)) { String languageAndRelease = String.format("%s - %s", edge.getReleaseUuid(), edge.getLanguageTag()); if (languageAndReleaseSet.contains(languageAndRelease)) { result.addInconsistency(String.format("The node has more than one GFC of type %s, language %s for release %s", type, edge.getLanguageTag(), edge.getReleaseUuid()), uuid, HIGH); >>>>>>> .frameExplicit(GraphFieldContainerEdgeImpl.class)) { String languageAndBranch = String.format("%s - %s", edge.getBranchUuid(), edge.getLanguageTag()); if (languageAndBranchSet.contains(languageAndBranch)) { result.addInconsistency(String.format("The node has more than one GFC of type %s, language %s for branch %s", type, edge.getLanguageTag(), edge.getBranchUuid()), uuid, HIGH);
<<<<<<< private static final Logger log = LoggerFactory.getLogger(JsonUtil.class); /** * When enabled indented JSON will be produced. */ public static boolean debugMode = false; ======= >>>>>>> private static final Logger log = LoggerFactory.getLogger(JsonUtil.class);
<<<<<<< list.add(new Change_0A58BEF0E7E7488D98BEF0E7E7588D4D()); ======= list.add(new Change_07F0975BD47249C6B0975BD472E9C6A4()); // list.add(new Change_A36C972476C147F3AC972476C157F3EF()); >>>>>>> list.add(new Change_07F0975BD47249C6B0975BD472E9C6A4()); list.add(new Change_0A58BEF0E7E7488D98BEF0E7E7588D4D()); // list.add(new Change_A36C972476C147F3AC972476C157F3EF());
<<<<<<< /** * Get the Version Number or null if no version set * @return Version Number */ VersionNumber getVersion(); /** * Set the Version Number * @param version */ void setVersion(VersionNumber version); ======= /** * Return the schema container version that holds the schema that is used in combination with this node. * * @return Schema container version */ SchemaContainerVersion getSchemaContainerVersion(); /** * Set the schema container version that is used in combination with this node. * * @param schema */ void setSchemaContainerVersion(SchemaContainerVersion schema); >>>>>>> /** * Get the Version Number or null if no version set * @return Version Number */ VersionNumber getVersion(); /** * Set the Version Number * @param version */ void setVersion(VersionNumber version); /** * Return the schema container version that holds the schema that is used in combination with this node. * * @return Schema container version */ SchemaContainerVersion getSchemaContainerVersion(); /** * Set the schema container version that is used in combination with this node. * * @param schema */ void setSchemaContainerVersion(SchemaContainerVersion schema);
<<<<<<< ======= public PermissionInfo getPermissionInfo(MeshVertex vertex) { PermissionInfo info = new PermissionInfo(); Set<GraphPermission> permissions = getPermissions(vertex); for (GraphPermission perm : permissions) { info.set(perm.getRestPerm(), true); } info.setOthers(false, vertex.hasPublishPermissions()); return info; } @Override public Set<GraphPermission> getPermissions(MeshVertex vertex) { Predicate<? super GraphPermission> isValidPermission = perm -> perm != READ_PUBLISHED_PERM && perm != PUBLISH_PERM || vertex.hasPublishPermissions(); return Stream.of(GraphPermission.values()) // Don't check for publish perms if it does not make sense for the vertex type .filter(isValidPermission) .filter(perm -> hasPermission(vertex, perm)) .collect(Collectors.toSet()); } @Override public boolean hasPermissionForId(Object elementId, GraphPermission permission) { PermissionCache permissionCache = mesh().permissionCache(); if (permissionCache.hasPermission(id(), permission, elementId)) { return true; } else { // Admin users have all permissions if (isAdmin()) { for (GraphPermission perm : GraphPermission.values()) { permissionCache.store(id(), perm, elementId); } return true; } FramedGraph graph = getGraph(); // Find all roles that are assigned to the user by checking the // shortcut edge from the index String idxKey = "e." + ASSIGNED_TO_ROLE + "_out"; Iterable<Edge> roleEdges = graph.getEdges(idxKey.toLowerCase(), this.id()); Vertex vertex = graph.getVertex(elementId); for (Edge roleEdge : roleEdges) { Vertex role = roleEdge.getVertex(Direction.IN); Set<String> allowedRoles = vertex.getProperty(permission.propertyKey()); boolean hasPermission = allowedRoles != null && allowedRoles.contains(role.<String>getProperty("uuid")); if (hasPermission) { // We only store granting permissions in the store in order // reduce the invalidation calls. // This way we do not need to invalidate the cache if a role // is removed from a group or a role is deleted. permissionCache.store(id(), permission, elementId); return true; } } // Fall back to read and check whether the user has read perm. Read permission also includes read published. 
if (permission == READ_PUBLISHED_PERM) { return hasPermissionForId(elementId, READ_PERM); } else { return false; } } } @Override public boolean hasPermission(MeshVertex vertex, GraphPermission permission) { if (log.isTraceEnabled()) { log.debug("Checking permissions for vertex {" + vertex.getUuid() + "}"); } return hasPermissionForId(vertex.id(), permission); } @Override public boolean hasReadPermission(NodeGraphFieldContainer container, String branchUuid, String requestedVersion) { Node node = container.getParentNode(); if (hasPermission(node, READ_PERM)) { return true; } boolean published = container.isPublished(branchUuid); if (published && hasPermission(node, READ_PUBLISHED_PERM)) { return true; } return false; } @Override >>>>>>> <<<<<<< UserDaoWrapper userDao = mesh().boot().userDao(); return userDao.transformToRestSync(this, ac, level, languageTags); ======= GenericParameters generic = ac.getGenericParameters(); FieldsSet fields = generic.getFields(); UserResponse restUser = new UserResponse(); if (fields.has("username")) { restUser.setUsername(getUsername()); } if (fields.has("emailAddress")) { restUser.setEmailAddress(getEmailAddress()); } if (fields.has("firstname")) { restUser.setFirstname(getFirstname()); } if (fields.has("lastname")) { restUser.setLastname(getLastname()); } if (fields.has("admin")) { restUser.setAdmin(isAdmin()); } if (fields.has("enabled")) { restUser.setEnabled(isEnabled()); } if (fields.has("nodeReference")) { setNodeReference(ac, restUser, level); } if (fields.has("groups")) { setGroups(ac, restUser); } if (fields.has("rolesHash")) { restUser.setRolesHash(getRolesHash()); } if (fields.has("forcedPasswordChange")) { restUser.setForcedPasswordChange(isForcedPasswordChange()); } fillCommonRestFields(ac, fields, restUser); setRolePermissions(ac, restUser); return restUser; } /** * Set the groups to which the user belongs in the rest model. * * @param ac * @param restUser */ private void setGroups(InternalActionContext ac, UserResponse restUser) { // TODO filter by permissions for (Group group : getGroups()) { GroupReference reference = group.transformToReference(); restUser.getGroups().add(reference); } } /** * Add the node reference field to the user response (if required to). * * @param ac * @param restUser * @param level * Current depth level of transformation */ private void setNodeReference(InternalActionContext ac, UserResponse restUser, int level) { NodeParameters parameters = ac.getNodeParameters(); // Check whether a node reference was set. Node node = getReferencedNode(); if (node == null) { return; } // Check whether the node reference field of the user should be expanded boolean expandReference = parameters.getExpandedFieldnameList().contains("nodeReference") || parameters.getExpandAll(); if (expandReference) { restUser.setNodeResponse(node.transformToRestSync(ac, level)); } else { NodeReference userNodeReference = node.transformToReference(ac); restUser.setNodeReference(userNodeReference); } >>>>>>> UserDaoWrapper userDao = mesh().boot().userDao(); return userDao.transformToRestSync(this, ac, level, languageTags);
<<<<<<< public static final String PUBLISHED_WEBROOT_PROPERTY_KEY = "publishedWebrootPathInfo"; public static final String PUBLISHED_WEBROOT_INDEX_NAME = "publishedWebrootPathInfoIndex"; public static final String VERSION_PROPERTY_KEY = "version"; private static final Logger log = LoggerFactory.getLogger(NodeGraphFieldContainerImpl.class); ======= private static final String PUBLISHED_PROPERTY_KEY = "published"; >>>>>>> public static final String PUBLISHED_WEBROOT_PROPERTY_KEY = "publishedWebrootPathInfo"; public static final String PUBLISHED_WEBROOT_INDEX_NAME = "publishedWebrootPathInfoIndex"; public static final String VERSION_PROPERTY_KEY = "version"; <<<<<<< @Override public List<FieldContainerChange> compareTo(FieldMap fieldMap) { List<FieldContainerChange> changes = new ArrayList<>(); Schema schemaA = getSchemaContainerVersion().getSchema(); Map<String, FieldSchema> fieldSchemaMap = schemaA.getFieldsAsMap(); // Handle all fields for (String fieldName : fieldSchemaMap.keySet()) { FieldSchema fieldSchema = fieldSchemaMap.get(fieldName); // Check content GraphField fieldA = getField(fieldSchema); Field fieldB = fieldMap.getField(fieldName, fieldSchema); // Handle null cases. The field may not have been created yet. if (fieldA != null && fieldB == null && fieldMap.hasField(fieldName)) { // Field only exists in A changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA == null && fieldB != null) { // Field only exists in B changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA != null && fieldB != null) { // Field exists in A and B and the fields are not equal to each other. changes.addAll(fieldA.compareTo(fieldB)); } else { // Both fields are equal if those fields are both null } } return changes; } @Override public List<FieldContainerChange> compareTo(NodeGraphFieldContainer container) { List<FieldContainerChange> changes = new ArrayList<>(); Schema schemaA = getSchemaContainerVersion().getSchema(); Map<String, FieldSchema> fieldMapA = schemaA.getFieldsAsMap(); Schema schemaB = container.getSchemaContainerVersion().getSchema(); Map<String, FieldSchema> fieldMapB = schemaB.getFieldsAsMap(); // Generate a structural diff first. This way it is easy to determine which fields have been added or removed. MapDifference<String, FieldSchema> diff = Maps.difference(fieldMapA, fieldMapB, new Equivalence<FieldSchema>() { @Override protected boolean doEquivalent(FieldSchema a, FieldSchema b) { return a.getName().equals(b.getName()); } @Override protected int doHash(FieldSchema t) { // TODO Auto-generated method stub return 0; } }); // Handle fields which exist only in A - They have been removed in B for (FieldSchema field : diff.entriesOnlyOnLeft().values()) { changes.add(new FieldContainerChange(field.getName(), FieldChangeTypes.REMOVED)); } // Handle fields which don't exist in A - They have been added in B for (FieldSchema field : diff.entriesOnlyOnRight().values()) { changes.add(new FieldContainerChange(field.getName(), FieldChangeTypes.ADDED)); } // Handle fields which are common in both schemas for (String fieldName : diff.entriesInCommon().keySet()) { FieldSchema fieldSchemaA = fieldMapA.get(fieldName); FieldSchema fieldSchemaB = fieldMapB.get(fieldName); // Check whether the field type is different in between both schemas if (fieldSchemaA.getType().equals(fieldSchemaB.getType())) { // Check content GraphField fieldA = getField(fieldSchemaA); GraphField fieldB = container.getField(fieldSchemaB); // Handle null cases. The field may not have been created yet. if (fieldA != null && fieldB == null) { // Field only exists in A changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA == null && fieldB != null) { // Field only exists in B changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA != null && fieldB != null) { changes.addAll(fieldA.compareTo(fieldB)); } else { // Both fields are equal if those fields are both null } } else { // The field type has changed changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } } return changes; } @Override public List<? extends MicronodeGraphField> getMicronodeFields(MicroschemaContainerVersion version) { return outE(HAS_FIELD).has(MicronodeGraphFieldImpl.class).mark().inV().has(MicronodeImpl.class).out(HAS_MICROSCHEMA_CONTAINER) .has(MicroschemaContainerVersionImpl.class).has("uuid", version.getUuid()).back().toListExplicit(MicronodeGraphFieldImpl.class); } @Override public List<? extends MicronodeGraphFieldList> getMicronodeListFields(MicroschemaContainerVersion version) { return out(HAS_LIST).has(MicronodeGraphFieldListImpl.class).mark().out(HAS_ITEM).has(MicronodeImpl.class).out(HAS_MICROSCHEMA_CONTAINER) .has(MicroschemaContainerVersionImpl.class).has("uuid", version.getUuid()).back().toListExplicit(MicronodeGraphFieldListImpl.class); } @Override public String getETag(InternalActionContext ac) { return ETag.hash(getUuid()); } ======= @Override public void setPublished(boolean published) { setProperty(PUBLISHED_PROPERTY_KEY, String.valueOf(published)); } @Override public boolean isPublished() { String fieldValue = getProperty(PUBLISHED_PROPERTY_KEY); return Boolean.valueOf(fieldValue); } >>>>>>> @Override public List<FieldContainerChange> compareTo(FieldMap fieldMap) { List<FieldContainerChange> changes = new ArrayList<>(); Schema schemaA = getSchemaContainerVersion().getSchema(); Map<String, FieldSchema> fieldSchemaMap = schemaA.getFieldsAsMap(); // Handle all fields for (String fieldName : fieldSchemaMap.keySet()) { FieldSchema fieldSchema = fieldSchemaMap.get(fieldName); // Check content GraphField fieldA = getField(fieldSchema); Field fieldB = fieldMap.getField(fieldName, fieldSchema); // Handle null cases. The field may not have been created yet. if (fieldA != null && fieldB == null && fieldMap.hasField(fieldName)) { // Field only exists in A changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA == null && fieldB != null) { // Field only exists in B changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA != null && fieldB != null) { // Field exists in A and B and the fields are not equal to each other. changes.addAll(fieldA.compareTo(fieldB)); } else { // Both fields are equal if those fields are both null } } return changes; } @Override public List<FieldContainerChange> compareTo(NodeGraphFieldContainer container) { List<FieldContainerChange> changes = new ArrayList<>(); Schema schemaA = getSchemaContainerVersion().getSchema(); Map<String, FieldSchema> fieldMapA = schemaA.getFieldsAsMap(); Schema schemaB = container.getSchemaContainerVersion().getSchema(); Map<String, FieldSchema> fieldMapB = schemaB.getFieldsAsMap(); // Generate a structural diff first. This way it is easy to determine which fields have been added or removed. MapDifference<String, FieldSchema> diff = Maps.difference(fieldMapA, fieldMapB, new Equivalence<FieldSchema>() { @Override protected boolean doEquivalent(FieldSchema a, FieldSchema b) { return a.getName().equals(b.getName()); } @Override protected int doHash(FieldSchema t) { // TODO Auto-generated method stub return 0; } }); // Handle fields which exist only in A - They have been removed in B for (FieldSchema field : diff.entriesOnlyOnLeft().values()) { changes.add(new FieldContainerChange(field.getName(), FieldChangeTypes.REMOVED)); } // Handle fields which don't exist in A - They have been added in B for (FieldSchema field : diff.entriesOnlyOnRight().values()) { changes.add(new FieldContainerChange(field.getName(), FieldChangeTypes.ADDED)); } // Handle fields which are common in both schemas for (String fieldName : diff.entriesInCommon().keySet()) { FieldSchema fieldSchemaA = fieldMapA.get(fieldName); FieldSchema fieldSchemaB = fieldMapB.get(fieldName); // Check whether the field type is different in between both schemas if (fieldSchemaA.getType().equals(fieldSchemaB.getType())) { // Check content GraphField fieldA = getField(fieldSchemaA); GraphField fieldB = container.getField(fieldSchemaB); // Handle null cases. The field may not have been created yet. if (fieldA != null && fieldB == null) { // Field only exists in A changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA == null && fieldB != null) { // Field only exists in B changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } else if (fieldA != null && fieldB != null) { changes.addAll(fieldA.compareTo(fieldB)); } else { // Both fields are equal if those fields are both null } } else { // The field type has changed changes.add(new FieldContainerChange(fieldName, FieldChangeTypes.UPDATED)); } } return changes; } @Override public List<? extends MicronodeGraphField> getMicronodeFields(MicroschemaContainerVersion version) { return outE(HAS_FIELD).has(MicronodeGraphFieldImpl.class).mark().inV().has(MicronodeImpl.class).out(HAS_MICROSCHEMA_CONTAINER) .has(MicroschemaContainerVersionImpl.class).has("uuid", version.getUuid()).back().toListExplicit(MicronodeGraphFieldImpl.class); } @Override public List<? extends MicronodeGraphFieldList> getMicronodeListFields(MicroschemaContainerVersion version) { return out(HAS_LIST).has(MicronodeGraphFieldListImpl.class).mark().out(HAS_ITEM).has(MicronodeImpl.class).out(HAS_MICROSCHEMA_CONTAINER) .has(MicroschemaContainerVersionImpl.class).has("uuid", version.getUuid()).back().toListExplicit(MicronodeGraphFieldListImpl.class); } @Override public String getETag(InternalActionContext ac) { return ETag.hash(getUuid()); }
<<<<<<< /* * Copyright (C) 2012-2017 52°North Initiative for Geospatial Open Source ======= /** * Copyright (C) 2012-2017 52°North Initiative for Geospatial Open Source >>>>>>> <<<<<<< ======= import static org.n52.sos.ext.deleteobservation.DeleteObservationConstants.*; import java.util.Collections; import java.util.Map; import java.util.Set; >>>>>>> <<<<<<< import org.n52.shetland.ogc.sos.Sos2Constants; import org.n52.shetland.ogc.sos.delobs.DeleteObservationConstants; import org.n52.shetland.ogc.sos.request.DeleteObservationRequest; import org.n52.sos.decode.kvp.AbstractSosKvpDecoder; ======= import org.n52.sos.decode.kvp.AbstractKvpDecoder; import org.n52.sos.exception.ows.InvalidParameterValueException; import org.n52.sos.exception.ows.concrete.MissingRequestParameterException; import org.n52.sos.exception.ows.concrete.MissingServiceParameterException; import org.n52.sos.exception.ows.concrete.MissingVersionParameterException; import org.n52.sos.exception.ows.concrete.UnsupportedDecoderInputException; import org.n52.sos.ext.deleteobservation.DeleteObservationConstants; import org.n52.sos.ext.deleteobservation.DeleteObservationRequest; import org.n52.sos.ext.deleteobservation.MissingObservationParameterException; import org.n52.sos.ogc.ows.CompositeOwsException; import org.n52.sos.ogc.ows.OWSConstants; import org.n52.sos.ogc.ows.OwsExceptionReport; import org.n52.sos.ogc.sos.Sos2Constants; import org.n52.sos.ogc.sos.SosConstants; import org.n52.sos.util.KvpHelper; import org.n52.sos.util.http.MediaTypes; >>>>>>> <<<<<<< @Override protected void getRequestParameterDefinitions(Builder<DeleteObservationRequest> builder) { builder.add(DeleteObservationConstants.PARAMETER_NAME, DeleteObservationRequest::setObservationIdentifier); ======= public DeleteObservationRequest decode(Map<String, String> objectToDecode) throws OwsExceptionReport { if (objectToDecode == null) { throw new UnsupportedDecoderInputException(this, objectToDecode); } DeleteObservationRequest request = new DeleteObservationRequest(NS_SOSDO_2_0); CompositeOwsException exceptions = new CompositeOwsException(); boolean foundRequest = false, foundService = false, foundVersion = false, foundObservation = false; for (String parameterName : objectToDecode.keySet()) { String parameterValues = objectToDecode.get(parameterName); try { if (parameterName.equalsIgnoreCase(OWSConstants.RequestParams.service.name())) { request.setService(KvpHelper.checkParameterSingleValue(parameterValues, parameterName)); foundService = true; } else if (parameterName.equalsIgnoreCase(OWSConstants.RequestParams.version.name())) { request.setVersion(KvpHelper.checkParameterSingleValue(parameterValues, parameterName)); foundVersion = true; } else if (parameterName.equalsIgnoreCase(OWSConstants.RequestParams.request.name())) { KvpHelper.checkParameterSingleValue(parameterValues, parameterName); foundRequest = true; } // observation (optional) else if (parameterName.equalsIgnoreCase(PARAM_OBSERVATION)) { request.setObservationIdentifiers(KvpHelper.checkParameterMultipleValues(parameterValues, parameterName)); foundObservation = true; } // offering (optional) else if (parameterName.equalsIgnoreCase(PARAM_OFFERING)) { request.setOfferings(KvpHelper.checkParameterMultipleValues(parameterValues, parameterName)); } // observedProperty (optional) else if (parameterName.equalsIgnoreCase(PARAM_OBSERVED_PROPERTY)) { request.setObservedProperties(KvpHelper.checkParameterMultipleValues(parameterValues, parameterName)); } // procedure (optional) else if (parameterName.equalsIgnoreCase(PARAM_PROCEDURE)) { request.setProcedures(KvpHelper.checkParameterMultipleValues(parameterValues, parameterName)); } // featureOfInterest (optional) else if (parameterName.equalsIgnoreCase(PARAM_FEATURE_OF_INTEREST)) { request.setFeatureIdentifiers(KvpHelper.checkParameterMultipleValues(parameterValues, parameterName)); } // eventTime (optional) else if (parameterName.equalsIgnoreCase(PARAM_TEMPORAL_FILTER)) { try { request.setTemporalFilters(parseTemporalFilter( KvpHelper.checkParameterMultipleValues(parameterValues, parameterName), parameterName)); } catch (final OwsExceptionReport e) { exceptions.add(new InvalidParameterValueException(parameterName, parameterValues).causedBy(e)); } } } catch (OwsExceptionReport owse) { exceptions.add(owse); } } if (!foundService) { exceptions.add(new MissingServiceParameterException()); } if (!foundVersion) { exceptions.add(new MissingVersionParameterException()); } if (!foundRequest) { exceptions.add(new MissingRequestParameterException()); } exceptions.throwIfNotEmpty(); return request; >>>>>>>
<<<<<<< @MeshTestSetting(useElasticsearch = false, inMemoryDB = false, testSize = FULL, startServer = true, clusterMode = true, monitoring = false) ======= @MeshTestSetting(elasticsearch = TRACKING, inMemoryDB = false, testSize = FULL, startServer = true, clusterMode = true) >>>>>>> @MeshTestSetting(elasticsearch = TRACKING, inMemoryDB = false, testSize = FULL, startServer = true, clusterMode = true, monitoring = false)
<<<<<<< List<ConsistencyCheck> consistencyChecks(); ======= BucketManager bucketManager(); @Component.Builder >>>>>>> List<ConsistencyCheck> consistencyChecks(); BucketManager bucketManager(); <<<<<<< ======= >>>>>>>
<<<<<<< import com.gentics.ferma.Tx; ======= import com.gentics.mesh.core.data.schema.MicroschemaContainer; >>>>>>> import com.gentics.ferma.Tx; import com.gentics.mesh.core.data.schema.MicroschemaContainer; <<<<<<< ======= import com.gentics.mesh.graphdb.NoTx; import com.gentics.mesh.json.JsonUtil; >>>>>>> import com.gentics.mesh.json.JsonUtil; <<<<<<< public void testDataFetchingError() throws Throwable { try (Tx tx = tx()) { role().revokePermissions(project(), READ_PERM); } GraphQLResponse response = call(() -> client().graphqlQuery(PROJECT_NAME, "{project{name}}")); System.out.println(response.getData().encodePrettily()); } @Test ======= >>>>>>>
<<<<<<< ======= import static com.gentics.mesh.core.data.relationship.GraphPermission.DELETE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.UPDATE_PERM; import static com.gentics.mesh.core.rest.error.Errors.error; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.CREATED; import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR; import static io.netty.handler.codec.http.HttpResponseStatus.NO_CONTENT; import static io.netty.handler.codec.http.HttpResponseStatus.OK; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import javax.inject.Inject; import javax.inject.Singleton; import com.gentics.mesh.Mesh; >>>>>>> import static com.gentics.mesh.core.data.relationship.GraphPermission.DELETE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.UPDATE_PERM; import static com.gentics.mesh.core.rest.error.Errors.error; import static com.gentics.mesh.core.rest.event.EventCauseAction.DELETE; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.CREATED; import static io.netty.handler.codec.http.HttpResponseStatus.NO_CONTENT; import static io.netty.handler.codec.http.HttpResponseStatus.OK; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.function.Function; import javax.inject.Inject; import javax.inject.Singleton; <<<<<<< import com.gentics.mesh.event.EventQueueBatch; ======= import com.gentics.mesh.etc.config.MeshOptions; >>>>>>> import com.gentics.mesh.etc.config.MeshOptions; import com.gentics.mesh.event.EventQueueBatch; <<<<<<< ======= private SearchQueue searchQueue; private final boolean clustered; >>>>>>> private final boolean clustered; <<<<<<< public HandlerUtilities(Database database) { ======= public HandlerUtilities(Database database, SearchQueue searchQueue, MeshOptions meshOptions) { this.searchQueue = searchQueue; >>>>>>> public HandlerUtilities(Database database, MeshOptions meshOptions) { <<<<<<< syncTx(ac, () -> { ======= asyncTx(ac, (tx) -> { >>>>>>> syncTx(ac, () -> { <<<<<<< }, model -> ac.send(model, created.get() ? CREATED : OK)); ======= // 3. The updating transaction has succeeded. Now lets store it in the index final ResultInfo info2 = info; return database.tx(() -> { info2.getBatch().processSync(); return info2.getModel(); }); }, model -> { ac.send(model, created.get() ? CREATED : OK); }); >>>>>>> }, model -> ac.send(model, created.get() ? CREATED : OK));
<<<<<<< import com.gentics.mesh.core.rest.branch.BranchCreateRequest; ======= import com.gentics.mesh.core.rest.admin.migration.MigrationStatus; import com.gentics.mesh.core.rest.node.NodeCreateRequest; import com.gentics.mesh.core.rest.node.NodeListResponse; >>>>>>> import com.gentics.mesh.core.rest.admin.migration.MigrationStatus; import com.gentics.mesh.core.rest.branch.BranchCreateRequest; import com.gentics.mesh.core.rest.node.NodeCreateRequest; import com.gentics.mesh.core.rest.node.NodeListResponse; <<<<<<< ======= import com.gentics.mesh.core.rest.node.NodeUpdateRequest; import com.gentics.mesh.core.rest.release.ReleaseCreateRequest; import com.gentics.mesh.core.rest.schema.impl.SchemaReferenceImpl; import com.gentics.mesh.parameter.VersioningParameters; >>>>>>> import com.gentics.mesh.core.rest.node.NodeUpdateRequest; import com.gentics.mesh.core.rest.schema.impl.SchemaReferenceImpl; import com.gentics.mesh.parameter.VersioningParameters; <<<<<<< () -> client().findNodeByUuid(PROJECT_NAME, uuid, new VersioningParametersImpl().setBranch(initialBranchUuid()))); ======= () -> client().findNodeByUuid(PROJECT_NAME, uuid, new VersioningParametersImpl().setRelease(initialReleaseUuid()))); >>>>>>> () -> client().findNodeByUuid(PROJECT_NAME, uuid, new VersioningParametersImpl().setBranch(initialBranchUuid()))); <<<<<<< // Also verify that the node is loadable in the other branch NodeResponse responseForBranch = call( () -> client().findNodeByUuid(PROJECT_NAME, uuid, new VersioningParametersImpl().setBranch("newBranch"))); assertThat(responseForBranch.getAvailableLanguages()).as("The node should have two container").hasSize(2); ======= // Also verify that the node is loadable in the other release NodeResponse responseForRelease = call( () -> client().findNodeByUuid(PROJECT_NAME, uuid, new VersioningParametersImpl().setRelease("newRelease"))); assertThat(responseForRelease.getAvailableLanguages()).as("The node should have two container").hasSize(2); >>>>>>> // Also verify that the node is loadable in the other branch NodeResponse responseForRelease = call( () -> client().findNodeByUuid(PROJECT_NAME, uuid, new VersioningParametersImpl().setBranch("newRelease"))); assertThat(responseForRelease.getAvailableLanguages()).as("The node should have two container").hasSize(2); <<<<<<< assertThatSubNodesExist(childrenUuids,initialBranchUuid()); assertThatSubNodesExist(childrenUuids,branchName); ======= assertThatSubNodesExist(childrenUuids, initialReleaseUuid()); assertThatSubNodesExist(childrenUuids, releaseName); >>>>>>> assertThatSubNodesExist(childrenUuids,initialBranchUuid()); assertThatSubNodesExist(childrenUuids,branchName); <<<<<<< "node_error_delete_failed_node_has_children"); assertThatSubNodesExist(childrenUuids, branchName); assertThatSubNodesExist(childrenUuids, initialBranchUuid()); // Delete the second language container (english) and use the recursive flag. The node and all subnodes should have been removed in the current branch ======= "node_error_delete_failed_node_has_children"); assertThatSubNodesExist(childrenUuids, releaseName); assertThatSubNodesExist(childrenUuids, initialReleaseUuid()); // Delete the second language container (english) and use the recursive flag. The node and all subnodes should have been removed in the current release >>>>>>> "node_error_delete_failed_node_has_children"); assertThatSubNodesExist(childrenUuids, branchName); assertThatSubNodesExist(childrenUuids, initialBranchUuid()); // Delete the second language container (english) and use the recursive flag. The node and all subnodes should have been removed in the current release <<<<<<< NodeResponse nodeResponse = call(() -> client().findNodeByUuid(PROJECT_NAME, childUuid, new VersioningParametersImpl().setBranch(branchName))); assertNull("We currently expect the node to be returned but without any contents.",nodeResponse.getLanguage()); ======= NodeResponse nodeResponse = call( () -> client().findNodeByUuid(PROJECT_NAME, childUuid, new VersioningParametersImpl().setRelease(releaseName))); assertNull("We currently expect the node to be returned but without any contents.", nodeResponse.getLanguage()); >>>>>>> NodeResponse nodeResponse = call( () -> client().findNodeByUuid(PROJECT_NAME, childUuid, new VersioningParametersImpl().setBranch(branchName))); assertNull("We currently expect the node to be returned but without any contents.", nodeResponse.getLanguage()); <<<<<<< // call(() -> client().findNodeByUuid(PROJECT_NAME, childUuid, new VersioningParametersImpl().setBranch(branchName)), NOT_FOUND, "object_not_found_for_uuid", childUuid); ======= // call(() -> client().findNodeByUuid(PROJECT_NAME, childUuid, new VersioningParametersImpl().setRelease(releaseName)), NOT_FOUND, // "object_not_found_for_uuid", childUuid); >>>>>>> // call(() -> client().findNodeByUuid(PROJECT_NAME, childUuid, new VersioningParametersImpl().setBranch(releaseName)), NOT_FOUND, // "object_not_found_for_uuid", childUuid);
<<<<<<< import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; ======= import static javax.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; import static org.apache.commons.lang.StringUtils.isBlank; import static org.apache.commons.lang.StringUtils.isNotBlank; >>>>>>> import static javax.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;