getSupportedTypes(ParseContext context) {
+ return getWrappedParser().getSupportedTypes(context);
+ }
+
+ /**
+ * Acts like a regular parser except it ignores the ContentHandler
+ * and it automatically sets/overwrites the embedded Parser in the
+ * ParseContext object.
+ *
+ * To retrieve the results of the parse, use {@link #getMetadata()}.
+ *
+ * Make sure to call {@link #reset()} after each parse.
+ */
+ @Override
+"
+-1," private void init(InputStream is) {
+ if (is == null) {
+ return;
+ }
+ Properties props = new Properties();
+ try {
+ props.load(is);
+ } catch (IOException e) {
+ //swallow; fall back to the existing defaults
+ } finally {
+ if (is != null) {
+ try {
+ is.close();
+ } catch (IOException e) {
+ //swallow
+ }
+ }
+ }
+
+ // set parameters for Tesseract
+ setTesseractPath(
+ getProp(props, ""tesseractPath"", getTesseractPath()));
+ setTessdataPath(
+ getProp(props, ""tessdataPath"", getTessdataPath()));
+ setLanguage(
+ getProp(props, ""language"", getLanguage()));
+ setPageSegMode(
+ getProp(props, ""pageSegMode"", getPageSegMode()));
+ setMinFileSizeToOcr(
+ getProp(props, ""minFileSizeToOcr"", getMinFileSizeToOcr()));
+ setMaxFileSizeToOcr(
+ getProp(props, ""maxFileSizeToOcr"", getMaxFileSizeToOcr()));
+ setTimeout(
+ getProp(props, ""timeout"", getTimeout()));
+ setOutputType(getProp(props, ""outputType"", getOutputType().toString()));
+ setPreserveInterwordSpacing(getProp(props, ""preserveInterwordSpacing"", false));
+
+ // set parameters for ImageMagick
+ setEnableImageProcessing(
+ getProp(props, ""enableImageProcessing"", isEnableImageProcessing()));
+ setImageMagickPath(
+ getProp(props, ""ImageMagickPath"", getImageMagickPath()));
+ setDensity(
+ getProp(props, ""density"", getDensity()));
+ setDepth(
+ getProp(props, ""depth"", getDepth()));
+ setColorspace(
+ getProp(props, ""colorspace"", getColorspace()));
+ setFilter(
+ getProp(props, ""filter"", getFilter()));
+ setResize(
+ getProp(props, ""resize"", getResize()));
+ setApplyRotation(
+ getProp(props, ""applyRotation"", getApplyRotation()));
+
+ loadOtherTesseractConfig(props);
+ }
+
+ /**
+ * @see #setTesseractPath(String tesseractPath)
+ */
+"
+-1," public String getInfo() {
+
+ return (info);
+
+ }
+
+
+"
+-1," public int callback(int num_msg, Pointer msg, Pointer resp, Pointer _) {
+ LOGGER.fine(""pam_conv num_msg=""+num_msg);
+ if(password==null)
+ return PAM_CONV_ERR;
+
+ // allocates pam_response[num_msg]. the caller will free this
+ Pointer m = libc.calloc(pam_response.SIZE,num_msg);
+ resp.setPointer(0,m);
+
+ for( int i=0; i
+ * J.3.2, Page 155, ECDSA over the field Fp
+ * an example with 239 bit prime
+ */
+"
+-1," private void debug(String msg) {
+ if ( logger.isDebugEnabled() ) {
+ logger.debug(Logger.EVENT_SUCCESS, msg);
+ }
+ }
+"
+-1," private static DocumentBuilderFactory createDocumentBuilderFactory() throws ParserConfigurationException {
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING , true);
+
+ dbf.setValidating(false);
+ dbf.setIgnoringComments(false);
+ dbf.setIgnoringElementContentWhitespace(true);
+ dbf.setNamespaceAware(true);
+ // dbf.setCoalescing(true);
+ // dbf.setExpandEntityReferences(true);
+
+ return dbf;
+ }
+
+ /**
+ * Read XML as DOM.
+ */
+"
+-1," public void changePassword_Resets_All_Sessions() throws Exception {
+ ScimUser user = createUser();
+
+ MockHttpSession session = new MockHttpSession();
+ MockHttpSession afterLoginSessionA = (MockHttpSession) getMockMvc().perform(post(""/login.do"")
+ .session(session)
+ .accept(TEXT_HTML_VALUE)
+ .param(""username"", user.getUserName())
+ .param(""password"", ""secr3T""))
+ .andExpect(status().isFound())
+ .andExpect(redirectedUrl(""/""))
+ .andReturn().getRequest().getSession(false);
+
+ session = new MockHttpSession();
+ MockHttpSession afterLoginSessionB = (MockHttpSession) getMockMvc().perform(post(""/login.do"")
+ .session(session)
+ .accept(TEXT_HTML_VALUE)
+ .param(""username"", user.getUserName())
+ .param(""password"", ""secr3T""))
+ .andExpect(status().isFound())
+ .andExpect(redirectedUrl(""/""))
+ .andReturn().getRequest().getSession(false);
+
+
+ assertNotNull(afterLoginSessionA.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY));
+ assertNotNull(afterLoginSessionB.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY));
+
+ getMockMvc().perform(get(""/profile"").session(afterLoginSessionB))
+ .andExpect(status().isOk());
+
+ Thread.sleep(1000 - (System.currentTimeMillis() % 1000) + 1);
+
+ MockHttpSession afterPasswordChange = (MockHttpSession) getMockMvc().perform(post(""/change_password.do"")
+ .session(afterLoginSessionA)
+ .with(csrf())
+ .accept(TEXT_HTML_VALUE)
+ .param(""current_password"", ""secr3T"")
+ .param(""new_password"", ""secr3T1"")
+ .param(""confirm_password"", ""secr3T1""))
+ .andExpect(status().isFound())
+ .andExpect(redirectedUrl(""profile""))
+ .andReturn().getRequest().getSession(false);
+
+ assertTrue(afterLoginSessionA.isInvalid());
+ assertNotNull(afterPasswordChange);
+ assertNotNull(afterPasswordChange.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY));
+ assertNotSame(afterLoginSessionA, afterPasswordChange);
+ getMockMvc().perform(
+ get(""/profile"")
+ .session(afterLoginSessionB)
+ .accept(TEXT_HTML))
+ .andExpect(status().isFound())
+ .andExpect(redirectedUrl(""/login""));
+
+ }
+
+"
+-1," public void setValueStackFactory(ValueStackFactory valueStackFactory) {
+ this.valueStackFactory = valueStackFactory;
+ }
+
+ @Inject(""devMode"")
+"
+-1," private List createAuthInfo(SolrZkClient zkClient) {
+ List ret = new LinkedList();
+
+ // In theory the credentials to add could change here if zookeeper hasn't been initialized
+ ZkCredentialsProvider credentialsProvider =
+ zkClient.getZkClientConnectionStrategy().getZkCredentialsToAddAutomatically();
+ for (ZkCredentialsProvider.ZkCredentials zkCredentials : credentialsProvider.getCredentials()) {
+ ret.add(new AuthInfo(zkCredentials.getScheme(), zkCredentials.getAuth()));
+ }
+ return ret;
+ }
+ }
+}
+"
+-1," public String changePassword(
+ Model model,
+ @RequestParam(""current_password"") String currentPassword,
+ @RequestParam(""new_password"") String newPassword,
+ @RequestParam(""confirm_password"") String confirmPassword,
+ HttpServletResponse response,
+ HttpServletRequest request) {
+
+ PasswordConfirmationValidation validation = new PasswordConfirmationValidation(newPassword, confirmPassword);
+ if (!validation.valid()) {
+ model.addAttribute(""message_code"", validation.getMessageCode());
+ response.setStatus(HttpStatus.UNPROCESSABLE_ENTITY.value());
+ return ""change_password"";
+ }
+
+ SecurityContext securityContext = SecurityContextHolder.getContext();
+ Authentication authentication = securityContext.getAuthentication();
+ String username = authentication.getName();
+
+ try {
+ changePasswordService.changePassword(username, currentPassword, newPassword);
+ request.getSession().invalidate();
+ request.getSession(true);
+ if (authentication instanceof UaaAuthentication) {
+ UaaAuthentication uaaAuthentication = (UaaAuthentication)authentication;
+ authentication = new UaaAuthentication(
+ uaaAuthentication.getPrincipal(),
+ new LinkedList<>(uaaAuthentication.getAuthorities()),
+ new UaaAuthenticationDetails(request)
+ );
+ }
+ securityContext.setAuthentication(authentication);
+ return ""redirect:profile"";
+ } catch (BadCredentialsException e) {
+ model.addAttribute(""message_code"", ""unauthorized"");
+ } catch (InvalidPasswordException e) {
+ model.addAttribute(""message"", e.getMessagesAsOneString());
+ }
+ response.setStatus(HttpStatus.UNPROCESSABLE_ENTITY.value());
+ return ""change_password"";
+ }
+"
+-1," public void testSnapshotMoreThanOnce() throws ExecutionException, InterruptedException, IOException {
+ Client client = client();
+ final File tempDir = randomRepoPath().getAbsoluteFile();
+ logger.info(""--> creating repository"");
+ assertAcked(client.admin().cluster().preparePutRepository(""test-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", tempDir)
+ .put(""compress"", randomBoolean())
+ .put(""chunk_size"", randomIntBetween(100, 1000))));
+
+ // only one shard
+ assertAcked(prepareCreate(""test"").setSettings(ImmutableSettings.builder()
+ .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+ ));
+ ensureYellow();
+ logger.info(""--> indexing"");
+
+ final int numDocs = randomIntBetween(10, 100);
+ IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
+ for (int i = 0; i < builders.length; i++) {
+ builders[i] = client().prepareIndex(""test"", ""doc"", Integer.toString(i)).setSource(""foo"", ""bar"" + i);
+ }
+ indexRandom(true, builders);
+ flushAndRefresh();
+ assertNoFailures(client().admin().indices().prepareOptimize(""test"").setFlush(true).setMaxNumSegments(1).get());
+
+ CreateSnapshotResponse createSnapshotResponseFirst = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponseFirst.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponseFirst.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseFirst.getSnapshotInfo().totalShards()));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+ {
+ SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus(""test-repo"").setSnapshots(""test"").get().getSnapshots().get(0);
+ List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
+ for (SnapshotIndexShardStatus status : shards) {
+ assertThat(status.getStats().getProcessedFiles(), greaterThan(1));
+ }
+ }
+ if (frequently()) {
+ logger.info(""--> upgrade"");
+ client().admin().indices().prepareUpdateSettings(""test"").setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""none"")).get();
+ backwardsCluster().allowOnAllNodes(""test"");
+ logClusterState();
+ boolean upgraded;
+ do {
+ logClusterState();
+ CountResponse countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+ upgraded = backwardsCluster().upgradeOneNode();
+ ensureYellow();
+ countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+ } while (upgraded);
+ client().admin().indices().prepareUpdateSettings(""test"").setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""all"")).get();
+ }
+ if (cluster().numDataNodes() > 1 && randomBoolean()) { // only bump the replicas if we have enough nodes
+ logger.info(""--> move from 0 to 1 replica"");
+ client().admin().indices().prepareUpdateSettings(""test"").setSettings(ImmutableSettings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
+ }
+ logger.debug(""---> repo exists: "" + new File(tempDir, ""indices/test/0"").exists() + "" files: "" + Arrays.toString(new File(tempDir, ""indices/test/0"").list())); // it's only one shard!
+ CreateSnapshotResponse createSnapshotResponseSecond = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-1"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseSecond.getSnapshotInfo().totalShards()));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-1"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+ {
+ SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus(""test-repo"").setSnapshots(""test-1"").get().getSnapshots().get(0);
+ List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
+ for (SnapshotIndexShardStatus status : shards) {
+
+ assertThat(status.getStats().getProcessedFiles(), equalTo(1)); // we flush before the snapshot such that we have to process the segments_N files
+ }
+ }
+
+ client().prepareDelete(""test"", ""doc"", ""1"").get();
+ CreateSnapshotResponse createSnapshotResponseThird = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-2"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponseThird.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponseThird.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseThird.getSnapshotInfo().totalShards()));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-2"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+ {
+ SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus(""test-repo"").setSnapshots(""test-2"").get().getSnapshots().get(0);
+ List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
+ for (SnapshotIndexShardStatus status : shards) {
+ assertThat(status.getStats().getProcessedFiles(), equalTo(2)); // we flush before the snapshot such that we have to process the segments_N files plus the .del file
+ }
+ }
+ }
+"
+-1," protected static void setupFeatures(DocumentBuilderFactory factory) {
+ Properties properties = System.getProperties();
+ List<String> features = new ArrayList<>();
+ for (Map.Entry<Object, Object> prop : properties.entrySet()) {
+ String key = (String) prop.getKey();
+ if (key.startsWith(DOCUMENT_BUILDER_FACTORY_FEATURE)) {
+ String uri = key.split(DOCUMENT_BUILDER_FACTORY_FEATURE + "":"")[1];
+ Boolean value = Boolean.valueOf((String)prop.getValue());
+ try {
+ factory.setFeature(uri, value);
+ features.add(""feature "" + uri + "" value "" + value);
+ } catch (ParserConfigurationException e) {
+ LOG.warn(""DocumentBuilderFactory doesn't support the feature {} with value {}, due to {}."", new Object[]{uri, value, e});
+ }
+ }
+ }
+ if (features.size() > 0) {
+ StringBuffer featureString = new StringBuffer();
+ // just log the configured feature
+ for (String feature : features) {
+ if (featureString.length() != 0) {
+ featureString.append("", "");
+ }
+ featureString.append(feature);
+ }
+ LOG.info(""DocumentBuilderFactory has been set with features {}."", featureString);
+ }
+
+ }
+
+"
+-1," public void testSaveAndLoad() throws IOException {
+ assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
+
+ ProjectWorkspace workspace =
+ TestDataHelper.createProjectWorkspaceForScenario(this, ""parser_with_cell"", tmp);
+ workspace.setUp();
+
+ // Warm the parser cache.
+ TestContext context = new TestContext();
+ ProcessResult runBuckResult =
+ workspace.runBuckdCommand(context, ""query"", ""deps(//Apps:TestAppsLibrary)"");
+ runBuckResult.assertSuccess();
+ assertThat(
+ runBuckResult.getStdout(),
+ Matchers.containsString(
+ ""//Apps:TestAppsLibrary\n""
+ + ""//Libraries/Dep1:Dep1_1\n""
+ + ""//Libraries/Dep1:Dep1_2\n""
+ + ""bar//Dep2:Dep2""));
+
+ // Save the parser cache to a file.
+ NamedTemporaryFile tempFile = new NamedTemporaryFile(""parser_data"", null);
+ runBuckResult =
+ workspace.runBuckdCommand(context, ""parser-cache"", ""--save"", tempFile.get().toString());
+ runBuckResult.assertSuccess();
+
+ // Write an empty content to Apps/BUCK.
+ Path path = tmp.getRoot().resolve(""Apps/BUCK"");
+ byte[] data = {};
+ Files.write(path, data);
+
+ context = new TestContext();
+ // Load the parser cache to a new buckd context.
+ runBuckResult =
+ workspace.runBuckdCommand(context, ""parser-cache"", ""--load"", tempFile.get().toString());
+ runBuckResult.assertSuccess();
+
+ // Perform the query again. If we didn't load the parser cache, this call would fail because
+ // Apps/BUCK is empty.
+ runBuckResult = workspace.runBuckdCommand(context, ""query"", ""deps(//Apps:TestAppsLibrary)"");
+ runBuckResult.assertSuccess();
+ assertThat(
+ runBuckResult.getStdout(),
+ Matchers.containsString(
+ ""//Apps:TestAppsLibrary\n""
+ + ""//Libraries/Dep1:Dep1_1\n""
+ + ""//Libraries/Dep1:Dep1_2\n""
+ + ""bar//Dep2:Dep2""));
+ }
+
+ @Test
+"
+-1," public void setEnforceAssertionsSigned(boolean enforceAssertionsSigned) {
+ this.enforceAssertionsSigned = enforceAssertionsSigned;
+ }
+
+ /**
+ * Enforce that the Issuer of the received Response/Assertion is known. The default is true.
+ */
+"
+-1," public void setUp() throws Exception {
+ SecurityContextHolder.clearContext();
+ scimUserProvisioning = mock(ScimUserProvisioning.class);
+ codeStore = mock(ExpiringCodeStore.class);
+ passwordValidator = mock(PasswordValidator.class);
+ clientDetailsService = mock(ClientDetailsService.class);
+ emailResetPasswordService = new UaaResetPasswordService(scimUserProvisioning, codeStore, passwordValidator, clientDetailsService);
+ }
+
+ @After
+"
+-1," public static final void main(String args[]) {
+ System.out.println(""Supported pseudo-random functions for KDF (version: "" + kdfVersion + "")"");
+ System.out.println(""Enum Name\tAlgorithm\t# bits"");
+ for (PRF_ALGORITHMS prf : PRF_ALGORITHMS.values()) {
+ System.out.println(prf + ""\t"" + prf.getAlgName() + ""\t"" + prf.getBits());
+ }
+ }
+"
+-1," private void doTestExtensionSizeLimit(int len, boolean ok) throws Exception {
+ // Setup Tomcat instance
+ Tomcat tomcat = getTomcatInstance();
+
+ tomcat.getConnector().setProperty(
+ ""maxExtensionSize"", Integer.toString(EXT_SIZE_LIMIT));
+
+ // Must have a real docBase - just use temp
+ Context ctx =
+ tomcat.addContext("""", System.getProperty(""java.io.tmpdir""));
+
+ Tomcat.addServlet(ctx, ""servlet"", new EchoHeaderServlet());
+ ctx.addServletMapping(""/"", ""servlet"");
+
+ tomcat.start();
+
+ String extName = "";foo="";
+ StringBuilder extValue = new StringBuilder(len);
+ for (int i = 0; i < (len - extName.length()); i++) {
+ extValue.append(""x"");
+ }
+
+ String[] request = new String[]{
+ ""POST /echo-params.jsp HTTP/1.1"" + SimpleHttpClient.CRLF +
+ ""Host: any"" + SimpleHttpClient.CRLF +
+ ""Transfer-encoding: chunked"" + SimpleHttpClient.CRLF +
+ ""Content-Type: application/x-www-form-urlencoded"" +
+ SimpleHttpClient.CRLF +
+ ""Connection: close"" + SimpleHttpClient.CRLF +
+ SimpleHttpClient.CRLF +
+ ""3"" + extName + extValue.toString() + SimpleHttpClient.CRLF +
+ ""a=0"" + SimpleHttpClient.CRLF +
+ ""4"" + SimpleHttpClient.CRLF +
+ ""&b=1"" + SimpleHttpClient.CRLF +
+ ""0"" + SimpleHttpClient.CRLF +
+ SimpleHttpClient.CRLF };
+
+ TrailerClient client =
+ new TrailerClient(tomcat.getConnector().getLocalPort());
+ client.setRequest(request);
+
+ client.connect();
+ client.processRequest();
+
+ if (ok) {
+ assertTrue(client.isResponse200());
+ } else {
+ assertTrue(client.isResponse500());
+ }
+ }
+
+ @Test
+"
+-1," void sendPluginResult(PluginResult pluginResult) {
+ synchronized (this) {
+ if (!aborted) {
+ callbackContext.sendPluginResult(pluginResult);
+ }
+ }
+ }
+ }
+
+ /**
+ * Adds an interface method to an InputStream to return the number of bytes
+ * read from the raw stream. This is used to track total progress against
+ * the HTTP Content-Length header value from the server.
+ */
+"
+-1," public void setHttpClient(HttpClient httpClient) {
+ this.httpClient = httpClient;
+ }
+"
+-1," protected Log getLog() {
+ return log;
+ }
+
+ // ------------------------------------------------------------ Constructor
+
+
+"
+-1," public void doStart() throws Exception {
+ URI rootURI;
+ if (serverInfo != null) {
+ rootURI = serverInfo.resolveServer(configuredDir);
+ } else {
+ rootURI = configuredDir;
+ }
+ if (!rootURI.getScheme().equals(""file"")) {
+ throw new IllegalStateException(""FileKeystoreManager must have a root that's a local directory (not "" + rootURI + "")"");
+ }
+ directory = new File(rootURI);
+ if (!directory.exists() || !directory.isDirectory() || !directory.canRead()) {
+ throw new IllegalStateException(""FileKeystoreManager must have a root that's a valid readable directory (not "" + directory.getAbsolutePath() + "")"");
+ }
+ log.debug(""Keystore directory is "" + directory.getAbsolutePath());
+ }
+
+"
+-1," public void setMaxSize(String maxSize) {
+ this.maxSize = Long.parseLong(maxSize);
+ }
+
+ /**
+ * Sets the buffer size to be used.
+ *
+ * @param bufferSize
+ */
+ @Inject(value = StrutsConstants.STRUTS_MULTIPART_BUFFERSIZE, required = false)
+"
+-1," public void execute(FunctionContext context) {
+ // Verify that the cache exists before continuing.
+ // When this function is executed by a remote membership listener, it is
+ // being invoked before the cache is started.
+ Cache cache = verifyCacheExists();
+
+ // Register as membership listener
+ registerAsMembershipListener(cache);
+
+ // Register functions
+ registerFunctions();
+
+ // Return status
+ context.getResultSender().lastResult(Boolean.TRUE);
+ }
+
+"
+-1," public void setSocketBuffer(int socketBuffer) {
+ super.setSocketBuffer(socketBuffer);
+ outputBuffer.setSocketBuffer(socketBuffer);
+ }
+"
+-1," public void waitForAllNodes(int timeout) throws IOException, InterruptedException {
+ waitForAllNodes(jettys.size(), timeout);
+ }
+
+"
+-1," static MatcherType fromElement(Element elt) {
+ if (StringUtils.hasText(elt.getAttribute(ATT_MATCHER_TYPE))) {
+ return valueOf(elt.getAttribute(ATT_MATCHER_TYPE));
+ }
+
+ return ant;
+ }
+"
+-1," public int realReadBytes(byte cbuf[], int off, int len)
+ throws IOException;
+ }
+
+ /** Same as java.nio.channels.WritableByteChannel.
+ */
+"
+-1," public void complete() {
+ if (log.isDebugEnabled()) {
+ logDebug(""complete "");
+ }
+ check();
+ request.getCoyoteRequest().action(ActionCode.ASYNC_COMPLETE, null);
+ }
+
+ @Override
+"
+-1," public void testContainsExpressionIsFalse() throws Exception {
+ // given
+ String anExpression = ""foo"";
+
+ // when
+ boolean actual = ComponentUtils.containsExpression(anExpression);
+
+ // then
+ assertFalse(actual);
+ }
+}
+
+class MockConfigurationProvider implements ConfigurationProvider {
+
+ public void destroy() {
+ }
+
+ public void init(Configuration configuration) throws ConfigurationException {
+ }
+
+ public boolean needsReload() {
+ return false;
+ }
+
+ public void loadPackages() throws ConfigurationException {
+ }
+
+ public void register(ContainerBuilder builder, LocatableProperties props) throws ConfigurationException {
+ builder.constant(StrutsConstants.STRUTS_TAG_ALTSYNTAX, ""false"");
+ }
+"
+-1," @Test(timeout = 1000L) public void testCeilLongMonths() throws Exception {
+ Calendar cal = Calendar.getInstance();
+ cal.set(Calendar.MONTH, Calendar.NOVEMBER);
+ new CronTab(""0 0 31 * *"").ceil(cal); // would infinite loop
+ }
+"
+-1," public static File unzip(File zip, File toDir, Predicate filter) throws IOException {
+ if (!toDir.exists()) {
+ FileUtils.forceMkdir(toDir);
+ }
+
+ Path targetDirNormalizedPath = toDir.toPath().normalize();
+ ZipFile zipFile = new ZipFile(zip);
+ try {
+ Enumeration<? extends ZipEntry> entries = zipFile.entries();
+ while (entries.hasMoreElements()) {
+ ZipEntry entry = entries.nextElement();
+ if (filter.test(entry)) {
+ File target = new File(toDir, entry.getName());
+
+ verifyInsideTargetDirectory(entry, target.toPath(), targetDirNormalizedPath);
+
+ if (entry.isDirectory()) {
+ throwExceptionIfDirectoryIsNotCreatable(target);
+ } else {
+ File parent = target.getParentFile();
+ throwExceptionIfDirectoryIsNotCreatable(parent);
+ copy(zipFile, entry, target);
+ }
+ }
+ }
+ return toDir;
+
+ } finally {
+ zipFile.close();
+ }
+ }
+
+"
+-1," public Authentication authenticate(Authentication req) throws AuthenticationException {
+ logger.debug(""Processing authentication request for "" + req.getName());
+
+ if (req.getCredentials() == null) {
+ BadCredentialsException e = new BadCredentialsException(""No password supplied"");
+ publish(new AuthenticationFailureBadCredentialsEvent(req, e));
+ throw e;
+ }
+
+ UaaUser user;
+ boolean passwordMatches = false;
+ user = getUaaUser(req);
+ if (user!=null) {
+ passwordMatches =
+ ((CharSequence) req.getCredentials()).length() != 0 && encoder.matches((CharSequence) req.getCredentials(), user.getPassword());
+ } else {
+ user = dummyUser;
+ }
+
+ if (!accountLoginPolicy.isAllowed(user, req)) {
+ logger.warn(""Login policy rejected authentication for "" + user.getUsername() + "", "" + user.getId()
+ + "". Ignoring login request."");
+ AuthenticationPolicyRejectionException e = new AuthenticationPolicyRejectionException(""Login policy rejected authentication"");
+ publish(new AuthenticationFailureLockedEvent(req, e));
+ throw e;
+ }
+
+ if (passwordMatches) {
+ logger.debug(""Password successfully matched for userId[""+user.getUsername()+""]:""+user.getId());
+
+ if (!allowUnverifiedUsers && !user.isVerified()) {
+ publish(new UnverifiedUserAuthenticationEvent(user, req));
+ logger.debug(""Account not verified: "" + user.getId());
+ throw new AccountNotVerifiedException(""Account not verified"");
+ }
+
+ int expiringPassword = getPasswordExpiresInMonths();
+ if (expiringPassword>0) {
+ Calendar cal = Calendar.getInstance();
+ cal.setTimeInMillis(user.getPasswordLastModified().getTime());
+ cal.add(Calendar.MONTH, expiringPassword);
+ if (cal.getTimeInMillis() < System.currentTimeMillis()) {
+ throw new PasswordExpiredException(""Your current password has expired. Please reset your password."");
+ }
+ }
+
+ Authentication success = new UaaAuthentication(
+ new UaaPrincipal(user),
+ user.getAuthorities(),
+ (UaaAuthenticationDetails) req.getDetails());
+
+ publish(new UserAuthenticationSuccessEvent(user, success));
+
+ return success;
+ }
+
+ if (user == dummyUser || user == null) {
+ logger.debug(""No user named '"" + req.getName() + ""' was found for origin:""+ origin);
+ publish(new UserNotFoundEvent(req));
+ } else {
+ logger.debug(""Password did not match for user "" + req.getName());
+ publish(new UserAuthenticationFailureEvent(user, req));
+ }
+ BadCredentialsException e = new BadCredentialsException(""Bad credentials"");
+ publish(new AuthenticationFailureBadCredentialsEvent(req, e));
+ throw e;
+ }
+
+"
+-1," public void test_SignedWithoutSignature() throws Exception {
+ JWT inputJwt = new JWT()
+ .setSubject(""123456789"")
+ .setIssuedAt(ZonedDateTime.now(ZoneOffset.UTC))
+ .setExpiration(ZonedDateTime.now(ZoneOffset.UTC).plusHours(2));
+
+ String encodedJWT = JWT.getEncoder().encode(inputJwt, HMACSigner.newSHA256Signer(""secret""));
+ String encodedJWTNoSignature = encodedJWT.substring(0, encodedJWT.lastIndexOf('.') + 1);
+
+ expectException(InvalidJWTSignatureException.class, () -> JWT.getDecoder().decode(encodedJWTNoSignature, HMACVerifier.newVerifier(""secret"")));
+
+ // Also cannot be decoded even if the caller calls decode w/out a signature because the header still indicates a signature algorithm.
+ expectException(InvalidJWTSignatureException.class, () -> JWT.getDecoder().decode(encodedJWTNoSignature));
+ }
+
+ @Test
+"
+-1," public void initJdbcScimUserProvisioningTests() throws Exception {
+ db = new JdbcScimUserProvisioning(jdbcTemplate, new JdbcPagingListFactory(jdbcTemplate, limitSqlAdapter));
+ zoneDb = new JdbcIdentityZoneProvisioning(jdbcTemplate);
+ providerDb = new JdbcIdentityProviderProvisioning(jdbcTemplate);
+ ScimSearchQueryConverter filterConverter = new ScimSearchQueryConverter();
+ Map<String, String> replaceWith = new HashMap<>();
+ replaceWith.put(""emails\\.value"", ""email"");
+ replaceWith.put(""groups\\.display"", ""authorities"");
+ replaceWith.put(""phoneNumbers\\.value"", ""phoneNumber"");
+ filterConverter.setAttributeNameMapper(new SimpleAttributeNameMapper(replaceWith));
+ db.setQueryConverter(filterConverter);
+ BCryptPasswordEncoder pe = new BCryptPasswordEncoder(4);
+
+ existingUserCount = jdbcTemplate.queryForInt(""select count(id) from users"");
+
+ defaultIdentityProviderId = jdbcTemplate.queryForObject(""select id from identity_provider where origin_key = ? and identity_zone_id = ?"", String.class, Origin.UAA, ""uaa"");
+
+ addUser(JOE_ID, ""joe"", pe.encode(""joespassword""), ""joe@joe.com"", ""Joe"", ""User"", ""+1-222-1234567"", defaultIdentityProviderId, ""uaa"");
+ addUser(MABEL_ID, ""mabel"", pe.encode(""mabelspassword""), ""mabel@mabel.com"", ""Mabel"", ""User"", """", defaultIdentityProviderId, ""uaa"");
+ }
+
+"
+-1," public void setUp() throws Exception {
+ lc = new LoggerContext();
+ lc.setName(""testContext"");
+ logger = lc.getLogger(LoggerSerializationTest.class);
+ // create the byte output stream
+ bos = new ByteArrayOutputStream();
+ oos = new ObjectOutputStream(bos);
+ whitelist = LogbackClassicSerializationHelper.getWhilelist();
+ whitelist.add(Foo.class.getName());
+ }
+
+ @After
+"
+-1," public void withFieldsAndXpath() throws Exception {
+ File tmpdir = File.createTempFile(""test"", ""tmp"", TEMP_DIR);
+ tmpdir.delete();
+ tmpdir.mkdir();
+ tmpdir.deleteOnExit();
+ createFile(tmpdir, ""x.xsl"", xsl.getBytes(""UTF-8""), false);
+ Map entityAttrs = createMap(""name"", ""e"", ""url"", ""cd.xml"",
+ XPathEntityProcessor.FOR_EACH, ""/catalog/cd"");
+ List<Map<String, String>> fields = new ArrayList<>();
+ fields.add(createMap(""column"", ""title"", ""xpath"", ""/catalog/cd/title""));
+ fields.add(createMap(""column"", ""artist"", ""xpath"", ""/catalog/cd/artist""));
+ fields.add(createMap(""column"", ""year"", ""xpath"", ""/catalog/cd/year""));
+ Context c = getContext(null,
+ new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
+ XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
+ xPathEntityProcessor.init(c);
+ List<Map<String, Object>> result = new ArrayList<>();
+ while (true) {
+ Map<String, Object> row = xPathEntityProcessor.nextRow();
+ if (row == null)
+ break;
+ result.add(row);
+ }
+ assertEquals(3, result.size());
+ assertEquals(""Empire Burlesque"", result.get(0).get(""title""));
+ assertEquals(""Bonnie Tyler"", result.get(1).get(""artist""));
+ assertEquals(""1982"", result.get(2).get(""year""));
+ }
+
+ @Test
+"
+-1," public void handle(Map record, String xpath);
+ }
+
+"
+-1," public int hashCode() {
+ final int prime = 31;
+ int result = super.hashCode();
+ result = prime * result + ( ( getLocation().getMember() == null ) ? 0 : getLocation().getMember().hashCode() );
+ return result;
+ }
+
+ @Override
+"
+-1," public int getTransportGuaranteeRedirectStatus() {
+ return transportGuaranteeRedirectStatus;
+ }
+
+
+ /**
+ * Set the HTTP status code used when the container needs to issue an HTTP
+ * redirect to meet the requirements of a configured transport guarantee.
+ *
+ * @param transportGuaranteeRedirectStatus The status to use. This value is
+ * not validated
+ */
+"
+-1," public void setFireRequestListenersOnForwards(boolean enable) {
+ fireRequestListenersOnForwards = enable;
+ }
+
+
+ @Override
+"
+-1," public void cleanUp() {
+ if (multi != null) {
+ multi.cleanUp();
+ }
+ }
+
+"
+-1," public final int getDegree()
+ {
+ return mDegree;
+ }
+
+ /**
+ * Returns the fieldpolynomial as a new Bitstring.
+ *
+ * @return a copy of the fieldpolynomial as a new Bitstring
+ */
+"
+-1," public void setXWorkConverter(XWorkConverter conv) {
+ this.defaultConverter = new OgnlTypeConverterWrapper(conv);
+ }
+
+ @Inject(XWorkConstants.DEV_MODE)
+"
+-1," public void testJvmDecoder1() {
+ // This should trigger an error but currently passes. Once the JVM is
+ // fixed, s/false/true/ and s/20/13/
+ doJvmDecoder(SRC_BYTES_1, false, 20);
+ }
+
+
+ @Test
+"
+-1," protected void doStop() throws Exception {
+ super.doStop();
+ // ensure client is closed when stopping
+ if (client != null && !client.isClosed()) {
+ client.close();
+ }
+ client = null;
+ }
+
+"
+-1," public O transform(final Object input) {
+ if (input == null) {
+ return null;
+ }
+ try {
+ final Class<?> cls = input.getClass();
+ final Method method = cls.getMethod(iMethodName, iParamTypes);
+ return (O) method.invoke(input, iArgs);
+ } catch (final NoSuchMethodException ex) {
+ throw new FunctorException(""InvokerTransformer: The method '"" + iMethodName + ""' on '"" +
+ input.getClass() + ""' does not exist"");
+ } catch (final IllegalAccessException ex) {
+ throw new FunctorException(""InvokerTransformer: The method '"" + iMethodName + ""' on '"" +
+ input.getClass() + ""' cannot be accessed"");
+ } catch (final InvocationTargetException ex) {
+ throw new FunctorException(""InvokerTransformer: The method '"" + iMethodName + ""' on '"" +
+ input.getClass() + ""' threw an exception"", ex);
+ }
+ }
+
+"
+-1," public String getCompression() {
+ switch (compressionLevel) {
+ case 0:
+ return ""off"";
+ case 1:
+ return ""on"";
+ case 2:
+ return ""force"";
+ }
+ return ""off"";
+ }
+
+
+ /**
+ * Set compression level.
+ */
+"
+-1," public Reader getData(String query) {
+ return new StringReader(xml);
+ }
+ };
+ }
+
+"
+-1," public String getInfo() {
+
+ return (info);
+
+ }
+
+
+"
+-1," public void setMethods(Set methods) {
+ this.methods = new HashSet<>();
+ for (String method : methods) {
+ this.methods.add(method.toUpperCase());
+ }
+ }
+
+ /**
+ * @param authenticationEntryPoint the authenticationEntryPoint to set
+ */
+"
+-1," public void run() {
+ synchronized (context) {
+ File file = context.targetFile;
+ if (file != null) {
+ file.delete();
+ }
+ // Trigger the abort callback immediately to minimize latency between it and abort() being called.
+ JSONObject error = createFileTransferError(ABORTED_ERR, context.source, context.target, null, -1, null);
+ context.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, error));
+ context.aborted = true;
+ if (context.connection != null) {
+ context.connection.disconnect();
+ }
+ }
+ }
+ });
+ }
+ }
+}
+"
+-1," public Iterator getGroups() {
+
+ synchronized (groups) {
+ return (groups.iterator());
+ }
+
+ }
+
+
+ /**
+ * Return the set of {@link Role}s assigned specifically to this user.
+ */
+ @Override
+"
+-1," public static Encryptor getInstance() throws EncryptionException {
+ if ( singletonInstance == null ) {
+ synchronized ( JavaEncryptor.class ) {
+ if ( singletonInstance == null ) {
+ singletonInstance = new JavaEncryptor();
+ }
+ }
+ }
+ return singletonInstance;
+ }
+
+"
+-1," private ApplicationContext getContext() {
+ return getSharedObject(ApplicationContext.class);
+ }
+
+ /**
+ * Allows configuring OpenID based authentication.
+ *
+ * Example Configurations
+ *
+ * A basic example accepting the defaults and not using attribute exchange:
+ *
+ *
+ * @Configuration
+ * @EnableWebSecurity
+ * public class OpenIDLoginConfig extends WebSecurityConfigurerAdapter {
+ *
+ * @Override
+ * protected void configure(HttpSecurity http) {
+ * http.authorizeRequests().antMatchers("/**").hasRole("USER").and().openidLogin()
+ * .permitAll();
+ * }
+ *
+ * @Override
+ * protected void configure(AuthenticationManagerBuilder auth) throws Exception {
+ * auth.inMemoryAuthentication()
+ * // the username must match the OpenID of the user you are
+ * // logging in with
+ * .withUser(
+ * "https://www.google.com/accounts/o8/id?id=lmkCn9xzPdsxVwG7pjYMuDgNNdASFmobNkcRPaWU")
+ * .password("password").roles("USER");
+ * }
+ * }
+ *
+ *
+ * A more advanced example demonstrating using attribute exchange and providing a
+ * custom AuthenticationUserDetailsService that will make any user that authenticates
+ * a valid user.
+ *
+ *
+ * @Configuration
+ * @EnableWebSecurity
+ * public class OpenIDLoginConfig extends WebSecurityConfigurerAdapter {
+ *
+ * @Override
+ * protected void configure(HttpSecurity http) {
+ * http.authorizeRequests()
+ * .antMatchers("/**")
+ * .hasRole("USER")
+ * .and()
+ * .openidLogin()
+ * .loginPage("/login")
+ * .permitAll()
+ * .authenticationUserDetailsService(
+ * new AutoProvisioningUserDetailsService())
+ * .attributeExchange("https://www.google.com/.*").attribute("email")
+ * .type("http://axschema.org/contact/email").required(true).and()
+ * .attribute("firstname").type("http://axschema.org/namePerson/first")
+ * .required(true).and().attribute("lastname")
+ * .type("http://axschema.org/namePerson/last").required(true).and().and()
+ * .attributeExchange(".*yahoo.com.*").attribute("email")
+ * .type("http://schema.openid.net/contact/email").required(true).and()
+ * .attribute("fullname").type("http://axschema.org/namePerson")
+ * .required(true).and().and().attributeExchange(".*myopenid.com.*")
+ * .attribute("email").type("http://schema.openid.net/contact/email")
+ * .required(true).and().attribute("fullname")
+ * .type("http://schema.openid.net/namePerson").required(true);
+ * }
+ * }
+ *
+ * public class AutoProvisioningUserDetailsService implements
+ * AuthenticationUserDetailsService<OpenIDAuthenticationToken> {
+ * public UserDetails loadUserDetails(OpenIDAuthenticationToken token)
+ * throws UsernameNotFoundException {
+ * return new User(token.getName(), "NOTUSED",
+ * AuthorityUtils.createAuthorityList("ROLE_USER"));
+ * }
+ * }
+ *
+ *
+ * @return the {@link OpenIDLoginConfigurer} for further customizations.
+ *
+ * @throws Exception
+ * @see OpenIDLoginConfigurer
+ */
+"
+-1," public URL getConfigFile() { return configFile; }
+ @Override
+"
+-1," void version(String version);
+
+ @LogMessage(level = INFO)
+ @Message(id = 2, value = ""Ignoring XML configuration."")
+"
+-1," public void copyToRepository(InputStream source, int size, Artifact destination, FileWriteMonitor monitor) throws IOException {
+ if(!destination.isResolved()) {
+ throw new IllegalArgumentException(""Artifact ""+destination+"" is not fully resolved"");
+ }
+ // is this a writable repository
+ if (!rootFile.canWrite()) {
+ throw new IllegalStateException(""This repository is not writable: "" + rootFile.getAbsolutePath() + "")"");
+ }
+
+ // where are we going to install the file
+ File location = getLocation(destination);
+
+ // assure that there isn't already a file installed at the specified location
+ if (location.exists()) {
+ throw new IllegalArgumentException(""Destination "" + location.getAbsolutePath() + "" already exists!"");
+ }
+
+ ArtifactTypeHandler typeHandler = typeHandlers.get(destination.getType());
+ if (typeHandler == null) typeHandler = DEFAULT_TYPE_HANDLER;
+ typeHandler.install(source, size, destination, monitor, location);
+
+ if (destination.getType().equalsIgnoreCase(""car"")) {
+ log.debug(""Installed module configuration; id={}; location={}"", destination, location);
+ }
+ }
+"
+-1," public void testRead7ZipMultiVolumeArchiveForFile() throws IOException {
+ final File file = getFile(""apache-maven-2.2.1.zip.001"");
+ ZipFile zf = new ZipFile(file);
+ zf.close();
+ }
+"
+-1," public boolean getMapperDirectoryRedirectEnabled();
+"
+-1," public void testFloatInHeader() {
+ Response response = WebClient.create(endPoint + TIKA_PATH)
+ .type(""application/pdf"")
+ .accept(""text/plain"")
+ .header(TikaResource.X_TIKA_PDF_HEADER_PREFIX +
+ ""averageCharTolerance"",
+ ""2.0"")
+ .put(ClassLoader.getSystemResourceAsStream(""testOCR.pdf""));
+ assertEquals(200, response.getStatus());
+
+ }
+"
+-1," private void testKeyGenerationAll()
+ throws Exception
+ {
+ testKeyGeneration(1024);
+ testKeyGeneration(2048);
+ testKeyGeneration(3072);
+ }
+
+"
+-1," public static String normalizeChildProjectValue(String actualValue){
+ actualValue = actualValue.replaceAll(""(,[ ]*,)"", "", "");
+ actualValue = actualValue.replaceAll(""(^,|,$)"", """");
+ return actualValue.trim();
+ }
+
+"
+-1," public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
+ if (recoveryActionToBlock.equals(action) || requestBlocked.getCount() == 0) {
+ logger.info(""--> preventing {} request"", action);
+ requestBlocked.countDown();
+ if (dropRequests) {
+ return;
+ }
+ throw new ConnectTransportException(node, ""DISCONNECT: prevented "" + action + "" request"");
+ }
+ transport.sendRequest(node, requestId, action, request, options);
+ }
+ }
+}
+"
+-1," protected void stopInternal() throws LifecycleException {
+
+ super.stopInternal();
+
+ // Close any open DB connection
+ close(this.dbConnection);
+
+ }
+
+
+"
+-1," public static String getPathWithinApplication(HttpServletRequest request) {
+ String contextPath = getContextPath(request);
+ String requestUri = getRequestUri(request);
+ if (StringUtils.startsWithIgnoreCase(requestUri, contextPath)) {
+ // Normal case: URI contains context path.
+ String path = requestUri.substring(contextPath.length());
+ return (StringUtils.hasText(path) ? path : ""/"");
+ } else {
+ // Special case: rather unusual.
+ return requestUri;
+ }
+ }
+
+ /**
+ * Return the request URI for the given request, detecting an include request
+ * URL if called within a RequestDispatcher include.
+ * As the value returned by request.getRequestURI() is not
+ * decoded by the servlet container, this method will decode it.
+ *
+ * The URI that the web container resolves should be correct, but some
+ * containers like JBoss/Jetty incorrectly include "";"" strings like "";jsessionid""
+ * in the URI. This method cuts off such incorrect appendices.
+ *
+ * @param request current HTTP request
+ * @return the request URI
+ */
+"
+-1," public boolean getAllowCasualMultipartParsing();
+
+
+ /**
+ * Set to true to allow requests mapped to servlets that
+ * do not explicitly declare @MultipartConfig or have
+ * &lt;multipart-config&gt; specified in web.xml to parse
+ * multipart/form-data requests.
+ *
+ * @param allowCasualMultipartParsing true to allow such
+ * casual parsing, false otherwise.
+ */
+"
+-1," public void setCharset(Charset charset) {
+ if( !byteC.isNull() ) {
+ // if the encoding changes we need to reset the conversion results
+ charC.recycle();
+ hasStrValue=false;
+ }
+ byteC.setCharset(charset);
+ }
+
+ /**
+ * Sets the content to be a char[]
+ *
+ * @param c the bytes
+ * @param off the start offset of the bytes
+ * @param len the length of the bytes
+ */
+"
+-1," public final GF2nElement convert(GF2nElement elem, GF2nField basis)
+ throws RuntimeException
+ {
+ if (basis == this)
+ {
+ return (GF2nElement)elem.clone();
+ }
+ if (fieldPolynomial.equals(basis.fieldPolynomial))
+ {
+ return (GF2nElement)elem.clone();
+ }
+ if (mDegree != basis.mDegree)
+ {
+ throw new RuntimeException(""GF2nField.convert: B1 has a""
+ + "" different degree and thus cannot be coverted to!"");
+ }
+
+ int i;
+ GF2Polynomial[] COBMatrix;
+ i = fields.indexOf(basis);
+ if (i == -1)
+ {
+ computeCOBMatrix(basis);
+ i = fields.indexOf(basis);
+ }
+ COBMatrix = (GF2Polynomial[])matrices.elementAt(i);
+
+ GF2nElement elemCopy = (GF2nElement)elem.clone();
+ if (elemCopy instanceof GF2nONBElement)
+ {
+ // remember: ONB treats its bits in reverse order
+ ((GF2nONBElement)elemCopy).reverseOrder();
+ }
+ GF2Polynomial bs = new GF2Polynomial(mDegree, elemCopy.toFlexiBigInt());
+ bs.expandN(mDegree);
+ GF2Polynomial result = new GF2Polynomial(mDegree);
+ for (i = 0; i < mDegree; i++)
+ {
+ if (bs.vectorMult(COBMatrix[i]))
+ {
+ result.setBit(mDegree - 1 - i);
+ }
+ }
+ if (basis instanceof GF2nPolynomialField)
+ {
+ return new GF2nPolynomialElement((GF2nPolynomialField)basis,
+ result);
+ }
+ else if (basis instanceof GF2nONBField)
+ {
+ GF2nONBElement res = new GF2nONBElement((GF2nONBField)basis,
+ result.toFlexiBigInt());
+ // TODO Remember: ONB treats its Bits in reverse order !!!
+ res.reverseOrder();
+ return res;
+ }
+ else
+ {
+ throw new RuntimeException(
+ ""GF2nField.convert: B1 must be an instance of ""
+ + ""GF2nPolynomialField or GF2nONBField!"");
+ }
+
+ }
+
+"
+-1," public CsrfConfigurer csrfTokenRepository(
+ CsrfTokenRepository csrfTokenRepository) {
+ Assert.notNull(csrfTokenRepository, ""csrfTokenRepository cannot be null"");
+ this.csrfTokenRepository = csrfTokenRepository;
+ return this;
+ }
+
+ /**
+ * Specify the {@link RequestMatcher} to use for determining when CSRF should be
+ * applied. The default is to ignore GET, HEAD, TRACE, OPTIONS and process all other
+ * requests.
+ *
+ * @param requireCsrfProtectionMatcher the {@link RequestMatcher} to use
+ * @return the {@link CsrfConfigurer} for further customizations
+ */
+"
+-1," private BigInteger[] derDecode(
+ byte[] encoding)
+ throws IOException
+ {
+ ASN1Sequence s = (ASN1Sequence)ASN1Primitive.fromByteArray(encoding);
+ if (s.size() != 2)
+ {
+ throw new IOException(""malformed signature"");
+ }
+ if (!Arrays.areEqual(encoding, s.getEncoded(ASN1Encoding.DER)))
+ {
+ throw new IOException(""malformed signature"");
+ }
+
+ return new BigInteger[]{
+ ((ASN1Integer)s.getObjectAt(0)).getValue(),
+ ((ASN1Integer)s.getObjectAt(1)).getValue()
+ };
+ }
+
+"
+-1," protected UserDetailsContextMapper getUserDetailsContextMapper() {
+ return userDetailsContextMapper;
+ }
+"
+-1," public abstract void perform() throws IOException;
+"
+-1," public long getFailureCount() {
+ return failureCounter.get();
+ }
+
+"
+-1," public long getTimestamp() {
+ return timestamp;
+ }
+ }
+}
+"
+-1," public int getTransportGuaranteeRedirectStatus() {
+ return transportGuaranteeRedirectStatus;
+ }
+
+
+ /**
+ * Set the HTTP status code used when the container needs to issue an HTTP
+ * redirect to meet the requirements of a configured transport guarantee.
+ *
+ * @param transportGuaranteeRedirectStatus The status to use. This value is
+ * not validated
+ */
+"
+-1," public void setRedirectUri(String redirectUri) {
+ this.redirectUri = redirectUri;
+ }
+"
+-1," public boolean isDoLoop() {
+ return iDoLoop;
+ }
+
+"
+-1," public void testCompatibilityWith_v1_0_12() throws IOException, ClassNotFoundException {
+ FileInputStream fis = new FileInputStream(SERIALIZATION_PREFIX + ""logger_v1.0.12.ser"");
+ ObjectInputStream ois = new ObjectInputStream(fis);
+ Logger a = (Logger) ois.readObject();
+ ois.close();
+ assertEquals(""a"", a.getName());
+ }
+
+"
+-1," public Tomcat getTomcatInstance() {
+ return tomcat;
+ }
+
+ /**
+ * Make the Tomcat instance preconfigured with test/webapp available to
+ * sub-classes.
+ * @param addJstl Should JSTL support be added to the test webapp
+ * @param start Should the Tomcat instance be started
+ *
+ * @return A Tomcat instance pre-configured with the web application located
+ * at test/webapp
+ *
+ * @throws LifecycleException If a problem occurs while starting the
+ * instance
+ */
+"
+-1," @CheckForNull public TimeZone getTimeZone() {
+ if (this.specTimezone == null) {
+ return null;
+ }
+ return TimeZone.getTimeZone(this.specTimezone);
+ }
+"
+-1," public int getCacheSize() {
+ return cacheSize;
+ }
+
+
+ /**
+ * @param cacheSize The cacheSize to set.
+ */
+"
+-1," protected Log getLog() {
+ return log;
+ }
+
+
+ @Override
+"
+-1," public X509Certificate generateCert(PublicKey publicKey,
+ PrivateKey privateKey, String sigalg, int validity, String cn,
+ String ou, String o, String l, String st, String c)
+ throws java.security.SignatureException,
+ java.security.InvalidKeyException {
+ X509V1CertificateGenerator certgen = new X509V1CertificateGenerator();
+
+ // issuer dn
+ Vector order = new Vector();
+ Hashtable attrmap = new Hashtable();
+
+ if (cn != null) {
+ attrmap.put(X509Principal.CN, cn);
+ order.add(X509Principal.CN);
+ }
+
+ if (ou != null) {
+ attrmap.put(X509Principal.OU, ou);
+ order.add(X509Principal.OU);
+ }
+
+ if (o != null) {
+ attrmap.put(X509Principal.O, o);
+ order.add(X509Principal.O);
+ }
+
+ if (l != null) {
+ attrmap.put(X509Principal.L, l);
+ order.add(X509Principal.L);
+ }
+
+ if (st != null) {
+ attrmap.put(X509Principal.ST, st);
+ order.add(X509Principal.ST);
+ }
+
+ if (c != null) {
+ attrmap.put(X509Principal.C, c);
+ order.add(X509Principal.C);
+ }
+
+ X509Principal issuerDN = new X509Principal(order, attrmap);
+ certgen.setIssuerDN(issuerDN);
+
+ // validity
+ long curr = System.currentTimeMillis();
+ long untill = curr + (long) validity * 24 * 60 * 60 * 1000;
+
+ certgen.setNotBefore(new Date(curr));
+ certgen.setNotAfter(new Date(untill));
+
+ // subject dn
+ certgen.setSubjectDN(issuerDN);
+
+ // public key
+ certgen.setPublicKey(publicKey);
+
+ // signature alg
+ certgen.setSignatureAlgorithm(sigalg);
+
+ // serial number
+ certgen.setSerialNumber(new BigInteger(String.valueOf(curr)));
+
+ // make certificate
+ return certgen.generateX509Certificate(privateKey);
+ }
+"
+-1," public String getRmiBindAddress() {
+ return rmiBindAddress;
+ }
+
+ /**
+ * Set the inet address on which the Platform RMI server is exported.
+ * @param theRmiBindAddress The textual representation of inet address
+ */
+"
+-1," public ClientLockoutPolicyRetriever setEnabled(boolean enabled) {
+ isEnabled = enabled;
+ return this;
+ }
+"
+-1," private Page createPage()
+ {
+ if (pageCreator == null)
+ {
+ return null;
+ }
+ else
+ {
+ return pageCreator.createPage();
+ }
+ }
+
+ /**
+ * @see org.apache.wicket.Component#onBeforeRender()
+ */
+ @Override
+"
+-1," public Object getTarget() {
+ Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
+ return this;
+ }
+
+ @Override
+"
+-1," public void testSpecifiedIndexUnavailable_multipleIndices() throws Exception {
+ createIndex(""test1"");
+ ensureYellow();
+
+ // Verify defaults
+ verify(search(""test1"", ""test2""), true);
+ verify(msearch(null, ""test1"", ""test2""), true);
+ verify(count(""test1"", ""test2""), true);
+ verify(clearCache(""test1"", ""test2""), true);
+ verify(_flush(""test1"", ""test2""),true);
+ verify(segments(""test1"", ""test2""), true);
+ verify(stats(""test1"", ""test2""), true);
+ verify(status(""test1"", ""test2""), true);
+ verify(optimize(""test1"", ""test2""), true);
+ verify(refresh(""test1"", ""test2""), true);
+ verify(validateQuery(""test1"", ""test2""), true);
+ verify(aliasExists(""test1"", ""test2""), true);
+ verify(typesExists(""test1"", ""test2""), true);
+ verify(deleteByQuery(""test1"", ""test2""), true);
+ verify(percolate(""test1"", ""test2""), true);
+ verify(mpercolate(null, ""test1"", ""test2""), false);
+ verify(suggest(""test1"", ""test2""), true);
+ verify(getAliases(""test1"", ""test2""), true);
+ verify(getFieldMapping(""test1"", ""test2""), true);
+ verify(getMapping(""test1"", ""test2""), true);
+ verify(getWarmer(""test1"", ""test2""), true);
+ verify(getSettings(""test1"", ""test2""), true);
+
+ IndicesOptions options = IndicesOptions.strictExpandOpen();
+ verify(search(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(msearch(options, ""test1"", ""test2""), true);
+ verify(count(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(clearCache(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(_flush(""test1"", ""test2"").setIndicesOptions(options),true);
+ verify(segments(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(stats(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(status(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(optimize(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(refresh(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(validateQuery(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(aliasExists(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(typesExists(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(deleteByQuery(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(percolate(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(mpercolate(options, ""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(suggest(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(getAliases(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(getFieldMapping(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(getMapping(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(getWarmer(""test1"", ""test2"").setIndicesOptions(options), true);
+ verify(getSettings(""test1"", ""test2"").setIndicesOptions(options), true);
+
+ options = IndicesOptions.lenientExpandOpen();
+ verify(search(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(msearch(options, ""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(count(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(clearCache(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(_flush(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(segments(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(stats(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(status(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(optimize(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(refresh(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(validateQuery(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(aliasExists(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(typesExists(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(deleteByQuery(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(percolate(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(mpercolate(options, ""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(suggest(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getAliases(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getFieldMapping(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getMapping(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getWarmer(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getSettings(""test1"", ""test2"").setIndicesOptions(options), false);
+
+ options = IndicesOptions.strictExpandOpen();
+ assertAcked(prepareCreate(""test2""));
+ ensureYellow();
+ verify(search(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(msearch(options, ""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(count(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(clearCache(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(_flush(""test1"", ""test2"").setIndicesOptions(options),false);
+ verify(segments(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(stats(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(status(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(optimize(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(refresh(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(validateQuery(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(aliasExists(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(typesExists(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(deleteByQuery(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(percolate(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(mpercolate(options, ""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(suggest(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getAliases(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getFieldMapping(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getMapping(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getWarmer(""test1"", ""test2"").setIndicesOptions(options), false);
+ verify(getSettings(""test1"", ""test2"").setIndicesOptions(options), false);
+ }
+
+ @Test
+"
+-1," public boolean isEraseCredentialsAfterAuthentication() {
+ return false;
+ }
+"
+-1," public void copyToRepository(InputStream source, int size, Artifact destination, FileWriteMonitor monitor) throws IOException {
+ if(!destination.isResolved()) {
+ throw new IllegalArgumentException(""Artifact ""+destination+"" is not fully resolved"");
+ }
+ // is this a writable repository
+ if (!rootFile.canWrite()) {
+ throw new IllegalStateException(""This repository is not writable: "" + rootFile.getAbsolutePath() + "")"");
+ }
+
+ // where are we going to install the file
+ File location = getLocation(destination);
+
+ // assure that there isn't already a file installed at the specified location
+ if (location.exists()) {
+ throw new IllegalArgumentException(""Destination "" + location.getAbsolutePath() + "" already exists!"");
+ }
+
+ ArtifactTypeHandler typeHandler = typeHandlers.get(destination.getType());
+ if (typeHandler == null) typeHandler = DEFAULT_TYPE_HANDLER;
+ typeHandler.install(source, size, destination, monitor, location);
+
+ if (destination.getType().equalsIgnoreCase(""car"")) {
+ log.debug(""Installed module configuration; id="" + destination + ""; location="" + location);
+ }
+ }
+"
+-1," public void setAllowJavaSerializedObject(boolean allowJavaSerializedObject) {
+ // need to override and call super for component docs
+ super.setAllowJavaSerializedObject(allowJavaSerializedObject);
+ }
+"
+-1," public void testChunkHeaderCRLF() throws Exception {
+ doTestChunkingCRLF(true, true, true, true, true, true);
+ }
+
+ @Test
+"
+-1," protected Log getLog() {
+ return log;
+ }
+
+ // ----------------------------------------------------------- Constructors
+
+
+"
+-1," void readRequest(HttpServletRequest request, HttpMessage message);
+
+ /**
+ * Parses the body from a {@link org.apache.camel.http.common.HttpMessage}
+ *
+ * @param httpMessage the http message
+ * @return the parsed body returned as either a {@link java.io.InputStream} or a {@link java.io.Reader}
+ * depending on the {@link #setUseReaderForPayload(boolean)} property.
+ * @throws java.io.IOException can be thrown
+ */
+"
+-1," public void setUp() throws Exception {
+ provider = new ActiveDirectoryLdapAuthenticationProvider(""mydomain.eu"", ""ldap://192.168.1.200/"");
+ }
+
+ @Test
+"
+-1," public Collection getRequiredPermissions(String regionName) {
+ return Collections.singletonList(new ResourcePermission(ResourcePermission.Resource.DATA,
+ ResourcePermission.Operation.READ, regionName));
+ }
+
+"
+-1," public void testConstructor1()
+ throws Exception
+ {
+ SimpleBindRequest bindRequest = new SimpleBindRequest();
+ bindRequest = bindRequest.duplicate();
+
+ assertNotNull(bindRequest.getBindDN());
+ assertEquals(bindRequest.getBindDN(), """");
+
+ assertNotNull(bindRequest.getPassword());
+ assertEquals(bindRequest.getPassword().stringValue(), """");
+
+ assertNotNull(bindRequest.getControls());
+ assertEquals(bindRequest.getControls().length, 0);
+
+ assertEquals(bindRequest.getBindType(), ""SIMPLE"");
+
+ SimpleBindRequest rebindRequest =
+ bindRequest.getRebindRequest(getTestHost(), getTestPort());
+ assertNotNull(bindRequest.getRebindRequest(getTestHost(),
+ getTestPort()));
+ assertEquals(rebindRequest.getBindDN(),
+ bindRequest.getBindDN());
+ assertEquals(rebindRequest.getPassword(),
+ bindRequest.getPassword());
+
+ assertEquals(bindRequest.getProtocolOpType(),
+ LDAPMessage.PROTOCOL_OP_TYPE_BIND_REQUEST);
+
+ bindRequest.getLastMessageID();
+
+ assertNotNull(bindRequest.encodeProtocolOp());
+
+ assertNotNull(bindRequest.toString());
+
+ final ArrayList toCodeLines = new ArrayList(10);
+ bindRequest.toCode(toCodeLines, ""foo"", 0, false);
+ assertFalse(toCodeLines.isEmpty());
+
+ toCodeLines.clear();
+ bindRequest.toCode(toCodeLines, ""bar"", 4, true);
+ assertFalse(toCodeLines.isEmpty());
+ }
+
+
+
+ /**
+ * Tests the second constructor, which takes a bind DN and password, using
+ * non-null, non-empty values.
+ *
+ * @throws Exception If an unexpected problem occurs.
+ */
+ @Test()
+"
+-1," public PackageConfig getPackageConfig(String name) {
+ return packageContexts.get(name);
+ }
+
+"
+-1," public String[] getRoles(Principal principal) {
+ if (principal instanceof GenericPrincipal) {
+ return ((GenericPrincipal) principal).getRoles();
+ }
+
+ String className = principal.getClass().getSimpleName();
+ throw new IllegalStateException(sm.getString(""realmBase.cannotGetRoles"", className));
+ }
+"
+-1," public void destroy() {
+ normalView = null;
+ viewViews = null;
+ viewServers = null;
+ viewGraphs = null;
+ pageView = null;
+ editView = null;
+ addView = null;
+ addGraph = null;
+ editGraph = null;
+ viewServer = null;
+ editServer = null;
+ addServer = null;
+ helpView = null;
+ editNormalView = null;
+ super.destroy();
+ }
+"
+-1," public ParameterMetaData build() {
+ return new ParameterMetaData(
+ parameterIndex,
+ name,
+ parameterType,
+ adaptOriginsAndImplicitGroups( getConstraints() ),
+ isCascading(),
+ getGroupConversions(),
+ requiresUnwrapping()
+ );
+ }
+ }
+}
+"
+-1," public String createDB(String dbName) {
+
+ // ensure there are no illegal chars in DB name
+ InputUtils.validateSafeInput(dbName);
+
+ String result = DB_CREATED_MSG + "": "" + dbName;
+
+ Connection conn = null;
+ try {
+ conn = DerbyConnectionUtil.getDerbyConnection(dbName,
+ DerbyConnectionUtil.CREATE_DB_PROP);
+ } catch (Throwable e) {
+ if (e instanceof SQLException) {
+ result = getSQLError((SQLException) e);
+ } else {
+ result = e.getMessage();
+ }
+ } finally {
+ // close DB connection
+ try {
+ if (conn != null) {
+ conn.close();
+ }
+ } catch (SQLException e) {
+ result = ""Problem closing DB connection"";
+ }
+ }
+
+ return result;
+ }
+
+"
+-1," protected void onUnsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, AuthenticationException failed) throws IOException {
+ super.onUnsuccessfulAuthentication(request, response, failed);
+ LOGGER.log(Level.INFO, ""Login attempt failed"", failed);
+ }
+
+"
+-1," public static File resolve(File[] roots, String path) {
+ for (File root : roots) {
+ File file = new File(path);
+ final File normalizedPath;
+ try {
+ if (file.isAbsolute()) {
+ normalizedPath = file.getCanonicalFile();
+ } else {
+ normalizedPath = new File(root, path).getCanonicalFile();
+ }
+ } catch (IOException ex) {
+ continue;
+ }
+ if(normalizedPath.getAbsolutePath().startsWith(root.getAbsolutePath())) {
+ return normalizedPath;
+ }
+ }
+ return null;
+ }
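+
+ // Illustrative behaviour sketch (hypothetical arguments): with a single root of
+ // new File(""/var/www""), resolve(roots, ""../etc/passwd"") canonicalizes to
+ // /var/etc/passwd, which does not start with /var/www, so the method returns null.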
+
+
+"
+-1," public Collection getRequiredPermissions(String regionName) {
+ return Collections.singletonList(ResourcePermissions.CLUSTER_MANAGE);
+ }
+
+"
+-1," private static ErrorPage findErrorPage
+ (Context context, Throwable exception) {
+
+ if (exception == null) {
+ return (null);
+ }
+ Class<?> clazz = exception.getClass();
+ String name = clazz.getName();
+ while (!Object.class.equals(clazz)) {
+ ErrorPage errorPage = context.findErrorPage(name);
+ if (errorPage != null) {
+ return (errorPage);
+ }
+ clazz = clazz.getSuperclass();
+ if (clazz == null) {
+ break;
+ }
+ name = clazz.getName();
+ }
+ return (null);
+
+ }
+"
+-1," protected void setUp() throws Exception {
+ super.setUp();
+ req = new MockHttpServletRequest();
+ req.setupGetParameterMap(new HashMap());
+ req.setupGetContextPath(""/my/namespace"");
+
+ config = new DefaultConfiguration();
+ PackageConfig pkg = new PackageConfig.Builder(""myns"")
+ .namespace(""/my/namespace"").build();
+ PackageConfig pkg2 = new PackageConfig.Builder(""my"").namespace(""/my"").build();
+ config.addPackageConfig(""mvns"", pkg);
+ config.addPackageConfig(""my"", pkg2);
+ configManager = new ConfigurationManager() {
+ public Configuration getConfiguration() {
+ return config;
+ }
+ };
+ }
+
+"
+-1," public String getName() {
+ // Should we return the ID for the principal name? (No, because the
+ // UaaUserDatabase retrieves users by name.)
+ return principal.getName();
+ }
+
+ @Override
+"
+-1," public static IdStrategy idStrategy() {
+ Jenkins j = Jenkins.getInstance();
+ SecurityRealm realm = j.getSecurityRealm();
+ if (realm == null) {
+ return IdStrategy.CASE_INSENSITIVE;
+ }
+ return realm.getUserIdStrategy();
+ }
+
+"
+-1," private static void verify(ActionRequestBuilder requestBuilder, boolean fail, long expectedCount) {
+ if (fail) {
+ if (requestBuilder instanceof MultiSearchRequestBuilder) {
+ MultiSearchResponse multiSearchResponse = ((MultiSearchRequestBuilder) requestBuilder).get();
+ assertThat(multiSearchResponse.getResponses().length, equalTo(1));
+ assertThat(multiSearchResponse.getResponses()[0].getResponse(), nullValue());
+ } else {
+ try {
+ requestBuilder.get();
+ fail(""IndexMissingException or IndexClosedException was expected"");
+ } catch (IndexMissingException | IndexClosedException e) {}
+ }
+ } else {
+ if (requestBuilder instanceof SearchRequestBuilder) {
+ SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) requestBuilder;
+ assertHitCount(searchRequestBuilder.get(), expectedCount);
+ } else if (requestBuilder instanceof CountRequestBuilder) {
+ CountRequestBuilder countRequestBuilder = (CountRequestBuilder) requestBuilder;
+ assertHitCount(countRequestBuilder.get(), expectedCount);
+ } else if (requestBuilder instanceof MultiSearchRequestBuilder) {
+ MultiSearchResponse multiSearchResponse = ((MultiSearchRequestBuilder) requestBuilder).get();
+ assertThat(multiSearchResponse.getResponses().length, equalTo(1));
+ assertThat(multiSearchResponse.getResponses()[0].getResponse(), notNullValue());
+ } else {
+ requestBuilder.get();
+ }
+ }
+ }
+
+"
+-1," public void setUp() throws Exception {
+ TestClient testClient = new TestClient(getMockMvc());
+ adminToken = testClient.getClientCredentialsOAuthAccessToken(""admin"", ""adminsecret"",
+ ""clients.read clients.write clients.secret scim.write"");
+ String clientId = generator.generate().toLowerCase();
+ String clientSecret = generator.generate().toLowerCase();
+
+ BaseClientDetails clientDetails = new BaseClientDetails(clientId, null, null, ""client_credentials"", ""password.write"");
+ clientDetails.setClientSecret(clientSecret);
+
+ utils().createClient(getMockMvc(), adminToken, clientDetails);
+
+ passwordWriteToken = testClient.getClientCredentialsOAuthAccessToken(clientId, clientSecret,""password.write"");
+ }
+
+ @Test
+"
+-1," public Iterator getGroups() {
+
+ synchronized (groups) {
+ return (groups.values().iterator());
+ }
+
+ }
+
+
+ /**
+ * Return the unique global identifier of this user database.
+ */
+ @Override
+"
+-1," public boolean refersDirectlyTo(PyObject ob) {
+ if (ob == null || co_consts == null) {
+ return false;
+ } else {
+ for (PyObject obj: co_consts) {
+ if (obj == ob) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+"
+-1," public Charset getCharset() {
+ if (charset == null) {
+ charset = DEFAULT_CHARSET;
+ }
+ return charset;
+ }
+
+ /**
+ * Returns the message bytes.
+ */
+"
+-1," public String getDisplayName() {
+ return Messages.UpstreamComitterRecipientProvider_DisplayName();
+ }
+ }
+}
+"
+-1," public final String convert(String str, boolean query)
+ {
+ if (str == null) return null;
+
+ if( (!query || str.indexOf( '+' ) < 0) && str.indexOf( '%' ) < 0 )
+ return str;
+
+ StringBuffer dec = new StringBuffer(); // decoded string output
+ int strPos = 0;
+ int strLen = str.length();
+
+ dec.ensureCapacity(str.length());
+ while (strPos < strLen) {
+ int laPos; // lookahead position
+
+ // look ahead to next URLencoded metacharacter, if any
+ for (laPos = strPos; laPos < strLen; laPos++) {
+ char laChar = str.charAt(laPos);
+ if ((laChar == '+' && query) || (laChar == '%')) {
+ break;
+ }
+ }
+
+ // if there were non-metacharacters, copy them all as a block
+ if (laPos > strPos) {
+ dec.append(str.substring(strPos,laPos));
+ strPos = laPos;
+ }
+
+ // shortcut out of here if we're at the end of the string
+ if (strPos >= strLen) {
+ break;
+ }
+
+ // process next metacharacter
+ char metaChar = str.charAt(strPos);
+ if (metaChar == '+') {
+ dec.append(' ');
+ strPos++;
+ continue;
+ } else if (metaChar == '%') {
+ // We throw the original exception - the super will deal with
+ // it
+ // try {
+ dec.append((char)Integer.
+ parseInt(str.substring(strPos + 1, strPos + 3),16));
+ strPos += 3;
+ }
+ }
+
+ return dec.toString();
+ }
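+
+ // Illustrative usage sketch (hypothetical input): convert(""a%20b+c"", true)
+ // decodes to ""a b c"" -- '%20' becomes a space and, because query is true,
+ // '+' is also treated as a space.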
+
+
+
+"
+-1," public final void parse(Set mappingStreams) {
+ try {
+ // JAXBContext#newInstance() requires several permissions internally and doesn't use any privileged blocks
+ // itself; Wrapping it here avoids that all calling code bases need to have these permissions as well
+ JAXBContext jc = run( NewJaxbContext.action( ConstraintMappingsType.class ) );
+
+ Set alreadyProcessedConstraintDefinitions = newHashSet();
+ for ( InputStream in : mappingStreams ) {
+ String schemaVersion = xmlParserHelper.getSchemaVersion( ""constraint mapping file"", in );
+ String schemaResourceName = getSchemaResourceName( schemaVersion );
+ Schema schema = xmlParserHelper.getSchema( schemaResourceName );
+
+ Unmarshaller unmarshaller = jc.createUnmarshaller();
+ unmarshaller.setSchema( schema );
+
+ ConstraintMappingsType mapping = getValidationConfig( in, unmarshaller );
+ String defaultPackage = mapping.getDefaultPackage();
+
+ parseConstraintDefinitions(
+ mapping.getConstraintDefinition(),
+ defaultPackage,
+ alreadyProcessedConstraintDefinitions
+ );
+
+ for ( BeanType bean : mapping.getBean() ) {
+ Class<?> beanClass = ClassLoadingHelper.loadClass( bean.getClazz(), defaultPackage );
+ checkClassHasNotBeenProcessed( processedClasses, beanClass );
+
+ // update annotation ignores
+ annotationProcessingOptions.ignoreAnnotationConstraintForClass(
+ beanClass,
+ bean.getIgnoreAnnotations()
+ );
+
+ ConstrainedType constrainedType = ConstrainedTypeBuilder.buildConstrainedType(
+ bean.getClassType(),
+ beanClass,
+ defaultPackage,
+ constraintHelper,
+ annotationProcessingOptions,
+ defaultSequences
+ );
+ if ( constrainedType != null ) {
+ addConstrainedElement( beanClass, constrainedType );
+ }
+
+ Set constrainedFields = ConstrainedFieldBuilder.buildConstrainedFields(
+ bean.getField(),
+ beanClass,
+ defaultPackage,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedFields );
+
+ Set constrainedGetters = ConstrainedGetterBuilder.buildConstrainedGetters(
+ bean.getGetter(),
+ beanClass,
+ defaultPackage,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedGetters );
+
+ Set constrainedConstructors = ConstrainedExecutableBuilder.buildConstructorConstrainedExecutable(
+ bean.getConstructor(),
+ beanClass,
+ defaultPackage,
+ parameterNameProvider,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedConstructors );
+
+ Set constrainedMethods = ConstrainedExecutableBuilder.buildMethodConstrainedExecutable(
+ bean.getMethod(),
+ beanClass,
+ defaultPackage,
+ parameterNameProvider,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedMethods );
+
+ processedClasses.add( beanClass );
+ }
+ }
+ }
+ catch ( JAXBException e ) {
+ throw log.getErrorParsingMappingFileException( e );
+ }
+ }
+
+"
+-1," static ASN1Enumerated fromOctetString(byte[] enc)
+ {
+ if (enc.length > 1)
+ {
+ return new ASN1Enumerated(enc);
+ }
+
+ if (enc.length == 0)
+ {
+ throw new IllegalArgumentException(""ENUMERATED has zero length"");
+ }
+ int value = enc[0] & 0xff;
+
+ if (value >= cache.length)
+ {
+ return new ASN1Enumerated(Arrays.clone(enc));
+ }
+
+ ASN1Enumerated possibleMatch = cache[value];
+
+ if (possibleMatch == null)
+ {
+ possibleMatch = cache[value] = new ASN1Enumerated(Arrays.clone(enc));
+ }
+
+ return possibleMatch;
+ }
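+
+ // Behaviour sketch (assuming the usual small-value cache): repeated calls with a
+ // one-byte encoding such as { 0x05 } return the same cached instance, while any
+ // multi-byte encoding always allocates a fresh ASN1Enumerated.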
+"
+-1," public void addRecipients(final ExtendedEmailPublisherContext context, EnvVars env, Set to, Set cc, Set bcc) {
+ final class Debug implements RecipientProviderUtilities.IDebug {
+ private final ExtendedEmailPublisherDescriptor descriptor
+ = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class);
+
+ private final PrintStream logger = context.getListener().getLogger();
+
+ public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+ Set<User> users = RecipientProviderUtilities.getChangeSetAuthors(Collections.<Run<?, ?>>singleton(context.getRun()), debug);
+ RecipientProviderUtilities.addUsers(users, context, env, to, cc, bcc, debug);
+ }
+
+ @Extension
+"
+-1," protected void handleParams(ActionMapping mapping, StringBuilder uri) {
+ String name = mapping.getName();
+ String params = """";
+ if (name.indexOf('?') != -1) {
+ params = name.substring(name.indexOf('?'));
+ }
+ if (params.length() > 0) {
+ uri.append(params);
+ }
+ }
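+
+ // Example sketch (hypothetical mapping): a mapping whose name is ""edit?id=5""
+ // causes ""?id=5"" to be appended to the generated URI; a name without '?' leaves
+ // the URI untouched.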
+
+"
+-1," public static void copyRecursively(final Path source, final Path target, boolean overwrite) throws IOException {
+ final CopyOption[] options;
+ if (overwrite) {
+ options = new CopyOption[]{StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING};
+ } else {
+ options = new CopyOption[]{StandardCopyOption.COPY_ATTRIBUTES};
+ }
+ Files.walkFileTree(source, new FileVisitor<Path>() {
+ @Override
+ public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
+ Files.copy(dir, target.resolve(source.relativize(dir)), options);
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ Files.copy(file, target.resolve(source.relativize(file)), options);
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
+ DeploymentRepositoryLogger.ROOT_LOGGER.cannotCopyFile(exc, file);
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
+ return FileVisitResult.CONTINUE;
+ }
+ });
+ }
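+
+ // Usage sketch (hypothetical paths): copyRecursively(Paths.get(""/tmp/src""),
+ // Paths.get(""/tmp/dst""), true) mirrors the tree and, because overwrite is true,
+ // REPLACE_EXISTING is added to the copy options.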
+
+ /**
+ * Delete a path recursively, not throwing Exception if it fails or if the path is null.
+ * @param path a Path pointing to a file or a directory that may not exists anymore.
+ */
+"
+-1," public Properties defaultOutputProperties() {
+ Properties properties = new Properties();
+ properties.put(OutputKeys.ENCODING, defaultCharset);
+ properties.put(OutputKeys.OMIT_XML_DECLARATION, ""yes"");
+ return properties;
+ }
+
+ /**
+ * Converts the given input Source into the required result
+ */
+"
+-1," public static Factory prototypeFactory(final T prototype) {
+ if (prototype == null) {
+ return ConstantFactory.constantFactory(null);
+ }
+ try {
+ final Method method = prototype.getClass().getMethod(""clone"", (Class[]) null);
+ return new PrototypeCloneFactory(prototype, method);
+
+ } catch (final NoSuchMethodException ex) {
+ try {
+ prototype.getClass().getConstructor(new Class>[] { prototype.getClass() });
+ return new InstantiateFactory(
+ (Class) prototype.getClass(),
+ new Class>[] { prototype.getClass() },
+ new Object[] { prototype });
+ } catch (final NoSuchMethodException ex2) {
+ if (prototype instanceof Serializable) {
+ return (Factory) new PrototypeSerializationFactory((Serializable) prototype);
+ }
+ }
+ }
+ throw new IllegalArgumentException(""The prototype must be cloneable via a public clone method"");
+ }
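+
+ // Usage sketch (assuming this is Commons Collections' FactoryUtils and the
+ // prototype, e.g. java.util.Date, exposes a public clone() method):
+ // Factory<Date> factory = FactoryUtils.prototypeFactory(new Date());
+ // Date copy = factory.create(); // produced by cloning the prototype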
+
+ /**
+ * Restricted constructor.
+ */
+"
+-1," public void sec2500PreventAnonymousBind() {
+ provider.authenticate(new UsernamePasswordAuthenticationToken(""rwinch"", """"));
+ }
+
+ @SuppressWarnings(""unchecked"")
+ @Test(expected = IncorrectResultSizeDataAccessException.class)
+"
+-1," protected void checkConfig() throws IOException {
+ // Create an unbound server socket
+ ServerSocket socket =
+ JdkCompat.getJdkCompat().getUnboundSocket(sslProxy);
+ if (socket == null) {
+ // Can create unbound sockets (1.3 JVM) - can't test the connection
+ return;
+ }
+ initServerSocket(socket);
+
+ try {
+ // Set the timeout to 1ms as all we care about is if it throws an
+ // SSLException on accept.
+ socket.setSoTimeout(1);
+
+ socket.accept();
+ // Will never get here - no client can connect to an unbound port
+ } catch (SSLException ssle) {
+ // SSL configuration is invalid. Possibly cert doesn't match ciphers
+ IOException ioe = new IOException(sm.getString(
+ ""jsse.invalid_ssl_conf"", ssle.getMessage()));
+ JdkCompat.getJdkCompat().chainException(ioe, ssle);
+ throw ioe;
+ } catch (Exception e) {
+ /*
+ * Possible ways of getting here
+ * socket.accept() throws a SecurityException
+ * socket.setSoTimeout() throws a SocketException
+ * socket.accept() throws some other exception (after a JDK change)
+ * In these cases the test won't work so carry on - essentially
+ * the behaviour before this patch
+ * socket.accept() throws a SocketTimeoutException
+ * In this case all is well so carry on
+ */
+ } finally {
+ // Should be open here but just in case
+ try {
+ socket.close();
+ } catch (IOException ioe) {
+ // Ignore
+ }
+ }
+
+ }
+"
+-1," private static boolean startsWithStringArray(String sArray[], String value) {
+ if (value == null) {
+ return false;
+ }
+ for (int i = 0; i < sArray.length; i++) {
+ if (value.startsWith(sArray[i])) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+
+ /**
+ * Check if the resource could be compressed, if the client supports it.
+ */
+"
+-1," protected String getProtocolName() {
+ return ""Http"";
+ }
+
+
+ // ------------------------------------------------ HTTP specific properties
+ // ------------------------------------------ managed in the ProtocolHandler
+
+"
+-1," public String toString() {
+ return ""parameter:'"" + parameterName + ""'"";
+ }
+ }
+
+}
+"
+-1," public long getAvailable();
+
+
+ /**
+ * Set the available date/time for this servlet, in milliseconds since the
+ * epoch. If this date/time is in the future, any request for this servlet
+ * will return an SC_SERVICE_UNAVAILABLE error. A value equal to
+ * Long.MAX_VALUE is considered to mean that unavailability is permanent.
+ *
+ * @param available The new available date/time
+ */
+"
+-1," protected abstract Log getLog();
+"
+-1," private static ManagedMap parseInterceptUrlsForFilterInvocationRequestMap(
+ MatcherType matcherType, List urlElts, boolean useExpressions,
+ boolean addAuthenticatedAll, ParserContext parserContext) {
+
+ ManagedMap filterInvocationDefinitionMap = new ManagedMap();
+
+ for (Element urlElt : urlElts) {
+ String access = urlElt.getAttribute(ATT_ACCESS);
+ if (!StringUtils.hasText(access)) {
+ continue;
+ }
+
+ String path = urlElt.getAttribute(ATT_PATTERN);
+
+ if (!StringUtils.hasText(path)) {
+ parserContext.getReaderContext().error(
+ ""path attribute cannot be empty or null"", urlElt);
+ }
+
+ String method = urlElt.getAttribute(ATT_HTTP_METHOD);
+ if (!StringUtils.hasText(method)) {
+ method = null;
+ }
+
+ BeanDefinition matcher = matcherType.createMatcher(parserContext, path,
+ method);
+ BeanDefinitionBuilder attributeBuilder = BeanDefinitionBuilder
+ .rootBeanDefinition(SecurityConfig.class);
+
+ if (useExpressions) {
+ logger.info(""Creating access control expression attribute '"" + access
+ + ""' for "" + path);
+ // The single expression will be parsed later by the
+ // ExpressionFilterInvocationSecurityMetadataSource
+ attributeBuilder.addConstructorArgValue(new String[] { access });
+ attributeBuilder.setFactoryMethod(""createList"");
+
+ }
+ else {
+ attributeBuilder.addConstructorArgValue(access);
+ attributeBuilder.setFactoryMethod(""createListFromCommaDelimitedString"");
+ }
+
+ if (filterInvocationDefinitionMap.containsKey(matcher)) {
+ logger.warn(""Duplicate URL defined: "" + path
+ + "". The original attribute values will be overwritten"");
+ }
+
+ filterInvocationDefinitionMap.put(matcher,
+ attributeBuilder.getBeanDefinition());
+ }
+
+ if (addAuthenticatedAll && filterInvocationDefinitionMap.isEmpty()) {
+
+ BeanDefinition matcher = matcherType.createMatcher(parserContext, ""/**"",
+ null);
+ BeanDefinitionBuilder attributeBuilder = BeanDefinitionBuilder
+ .rootBeanDefinition(SecurityConfig.class);
+ attributeBuilder.addConstructorArgValue(new String[] { ""authenticated"" });
+ attributeBuilder.setFactoryMethod(""createList"");
+ filterInvocationDefinitionMap.put(matcher,
+ attributeBuilder.getBeanDefinition());
+ }
+
+ return filterInvocationDefinitionMap;
+ }
+
+"
+-1," public DeserializerFactory withConfig(DeserializerFactoryConfig config)
+ {
+ if (_factoryConfig == config) {
+ return this;
+ }
+ /* 22-Nov-2010, tatu: Handling of subtypes is tricky if we do immutable-with-copy-ctor;
+ * and we pretty much have to here either choose between losing subtype instance
+ * when registering additional deserializers, or losing deserializers.
+ * Instead, let's actually just throw an error if this method is called when subtype
+ * has not properly overridden this method; this to indicate problem as soon as possible.
+ */
+ if (getClass() != BeanDeserializerFactory.class) {
+ throw new IllegalStateException(""Subtype of BeanDeserializerFactory (""+getClass().getName()
+ +"") has not properly overridden method 'withAdditionalDeserializers': can not instantiate subtype with ""
+ +""additional deserializer definitions"");
+ }
+ return new BeanDeserializerFactory(config);
+ }
+
+ /*
+ /**********************************************************
+ /* DeserializerFactory API implementation
+ /**********************************************************
+ */
+
+ /**
+ * Method that {@link DeserializerCache}s call to create a new
+ * deserializer for types other than Collections, Maps, arrays and
+ * enums.
+ */
+ @Override
+"
+-1," public void setApplicationContext(ApplicationContext applicationContext)
+ throws BeansException {
+ this.defaultWebSecurityExpressionHandler
+ .setApplicationContext(applicationContext);
+ this.ignoredRequestRegistry = new IgnoredRequestConfigurer(applicationContext);
+ }
+"
+-1," private SessionCreationPolicy createPolicy(String createSession) {
+ if (""ifRequired"".equals(createSession)) {
+ return SessionCreationPolicy.IF_REQUIRED;
+ }
+ else if (""always"".equals(createSession)) {
+ return SessionCreationPolicy.ALWAYS;
+ }
+ else if (""never"".equals(createSession)) {
+ return SessionCreationPolicy.NEVER;
+ }
+ else if (""stateless"".equals(createSession)) {
+ return SessionCreationPolicy.STATELESS;
+ }
+
+ throw new IllegalStateException(""Cannot convert "" + createSession + "" to ""
+ + SessionCreationPolicy.class.getName());
+ }
+
+ @SuppressWarnings(""rawtypes"")
+"
+1," public PlainText decrypt(SecretKey key, CipherText ciphertext)
+ throws EncryptionException, IllegalArgumentException
+ {
+ long start = System.nanoTime(); // Current time in nanosecs; used to prevent timing attacks
+ if ( key == null ) {
+ throw new IllegalArgumentException(""SecretKey arg may not be null"");
+ }
+ if ( ciphertext == null ) {
+ throw new IllegalArgumentException(""Ciphertext may arg not be null"");
+ }
+
+ if ( ! CryptoHelper.isAllowedCipherMode(ciphertext.getCipherMode()) ) {
+ // This really should be an illegal argument exception, but it could
+ // mean that a partner encrypted something using a cipher mode that
+ // you do not accept, so it's a bit more complex than that. Also
+ // throwing an IllegalArgumentException doesn't allow us to provide
+ // the two separate error messages or automatically log it.
+ throw new EncryptionException(DECRYPTION_FAILED,
+ ""Invalid cipher mode "" + ciphertext.getCipherMode() +
+ "" not permitted for decryption or encryption operations."");
+ }
+ logger.debug(Logger.EVENT_SUCCESS,
+ ""Args valid for JavaEncryptor.decrypt(SecretKey,CipherText): "" +
+ ciphertext);
+
+ PlainText plaintext = null;
+ boolean caughtException = false;
+ int progressMark = 0;
+ try {
+ // First we validate the MAC.
+ boolean valid = CryptoHelper.isCipherTextMACvalid(key, ciphertext);
+ if ( !valid ) {
+ try {
+ // This is going to fail, but we want the same processing
+ // to occur as much as possible so as to prevent timing
+ // attacks. We _could_ just be satisfied by the additional
+ // sleep in the 'finally' clause, but an attacker on the
+ // same server who can run something like 'ps' can tell
+ // CPU time versus when the process is sleeping. Hence we
+ // try to make this as close as possible. Since we know
+ // it is going to fail, we ignore the result and ignore
+ // the (expected) exception.
+ handleDecryption(key, ciphertext); // Ignore return (should fail).
+ } catch(Exception ex) {
+ ; // Ignore
+ }
+ throw new EncryptionException(DECRYPTION_FAILED,
+ ""Decryption failed because MAC invalid for "" +
+ ciphertext);
+ }
+ progressMark++;
+ // The decryption only counts if the MAC was valid.
+ plaintext = handleDecryption(key, ciphertext);
+ progressMark++;
+ } catch(EncryptionException ex) {
+ caughtException = true;
+ String logMsg = null;
+ switch( progressMark ) {
+ case 1:
+ logMsg = ""Decryption failed because MAC invalid. See logged exception for details."";
+ break;
+ case 2:
+ logMsg = ""Decryption failed because handleDecryption() failed. See logged exception for details."";
+ break;
+ default:
+ logMsg = ""Programming error: unexpected progress mark == "" + progressMark;
+ break;
+ }
+ logger.error(Logger.SECURITY_FAILURE, logMsg);
+ throw ex; // Re-throw
+ }
+ finally {
+ if ( caughtException ) {
+ // The rest of this code is to try to account for any minute differences
+ // in the time it might take for the various reasons that decryption fails
+ // in order to prevent any other possible timing attacks. Perhaps it is
+ // going overboard. If nothing else, if N_SECS is large enough, it might
+ // deter attempted repeated attacks by making them take much longer.
+ long now = System.nanoTime();
+ long elapsed = now - start;
+ final long NANOSECS_IN_SEC = 1000000000L; // nanosec is 10**-9 sec
+ long nSecs = N_SECS * NANOSECS_IN_SEC; // N seconds in nano seconds
+ if ( elapsed < nSecs ) {
+ // Want to sleep so total time taken is N seconds.
+ long extraSleep = nSecs - elapsed;
+
+ // 'extraSleep' is in nanoseconds. Need to split it into a millisec
+ // part and a nanosec remainder. A nanosec is 10**-6 millisec, so
+ // divide by 10**6 to convert from nanoseconds to milliseconds; the
+ // remainder after that division is the leftover nanoseconds.
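+ // Worked example (illustrative numbers): extraSleep = 1,234,567,890 ns
+ // gives millis = 1234 and nanos = 567,890.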
+ long millis = extraSleep / 1000000L;
+ long nanos = (extraSleep - (millis * 1000000L));
+ assert nanos >= 0 && nanos <= Integer.MAX_VALUE :
+ ""Nanosecs out of bounds; nanos = "" + nanos;
+ try {
+ Thread.sleep(millis, (int)nanos);
+ } catch(InterruptedException ex) {
+ ; // Ignore
+ }
+ } // Else ... time already exceeds N_SECS sec, so do not sleep.
+ }
+ }
+ return plaintext;
+ }
+
+ // Handle the actual decryption portion. At this point it is assumed that
+ // any MAC has already been validated. (But see ""DISCUSS"" issue, below.)
+"
+1," public List getAclForPath(String path) {
+ List acls = zkACLProvider.getACLsToAdd(path);
+ return acls;
+ }
+ };
+ }
+
+"
+1," protected Object readResolve()
+ throws ObjectStreamException {
+ AbstractBrokerFactory factory = getPooledFactoryForKey(_poolKey);
+ if (factory != null)
+ return factory;
+
+ // reset these transient fields to empty values
+ _transactional = new ConcurrentHashMap();
+ _brokers = newBrokerSet();
+
+ makeReadOnly();
+ return this;
+ }
+
+"
+1," public void testRepositoryCreation() throws Exception {
+ Client client = client();
+
+ File location = newTempDir(LifecycleScope.SUITE);
+
+ logger.info(""--> creating repository"");
+ PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(""test-repo-1"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", location)
+ ).get();
+ assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
+
+ logger.info(""--> verify the repository"");
+ int numberOfFiles = location.listFiles().length;
+ VerifyRepositoryResponse verifyRepositoryResponse = client.admin().cluster().prepareVerifyRepository(""test-repo-1"").get();
+ assertThat(verifyRepositoryResponse.getNodes().length, equalTo(cluster().numDataAndMasterNodes()));
+
+ logger.info(""--> verify that we didn't leave any files as a result of verification"");
+ assertThat(location.listFiles().length, equalTo(numberOfFiles));
+
+ logger.info(""--> check that repository is really there"");
+ ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().clear().setMetaData(true).get();
+ MetaData metaData = clusterStateResponse.getState().getMetaData();
+ RepositoriesMetaData repositoriesMetaData = metaData.custom(RepositoriesMetaData.TYPE);
+ assertThat(repositoriesMetaData, notNullValue());
+ assertThat(repositoriesMetaData.repository(""test-repo-1""), notNullValue());
+ assertThat(repositoriesMetaData.repository(""test-repo-1"").type(), equalTo(""fs""));
+
+ logger.info(""--> creating another repository"");
+ putRepositoryResponse = client.admin().cluster().preparePutRepository(""test-repo-2"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", newTempDir(LifecycleScope.SUITE))
+ ).get();
+ assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
+
+ logger.info(""--> check that both repositories are in cluster state"");
+ clusterStateResponse = client.admin().cluster().prepareState().clear().setMetaData(true).get();
+ metaData = clusterStateResponse.getState().getMetaData();
+ repositoriesMetaData = metaData.custom(RepositoriesMetaData.TYPE);
+ assertThat(repositoriesMetaData, notNullValue());
+ assertThat(repositoriesMetaData.repositories().size(), equalTo(2));
+ assertThat(repositoriesMetaData.repository(""test-repo-1""), notNullValue());
+ assertThat(repositoriesMetaData.repository(""test-repo-1"").type(), equalTo(""fs""));
+ assertThat(repositoriesMetaData.repository(""test-repo-2""), notNullValue());
+ assertThat(repositoriesMetaData.repository(""test-repo-2"").type(), equalTo(""fs""));
+
+ logger.info(""--> check that both repositories can be retrieved by getRepositories query"");
+ GetRepositoriesResponse repositoriesResponse = client.admin().cluster().prepareGetRepositories().get();
+ assertThat(repositoriesResponse.repositories().size(), equalTo(2));
+ assertThat(findRepository(repositoriesResponse.repositories(), ""test-repo-1""), notNullValue());
+ assertThat(findRepository(repositoriesResponse.repositories(), ""test-repo-2""), notNullValue());
+
+ logger.info(""--> delete repository test-repo-1"");
+ client.admin().cluster().prepareDeleteRepository(""test-repo-1"").get();
+ repositoriesResponse = client.admin().cluster().prepareGetRepositories().get();
+ assertThat(repositoriesResponse.repositories().size(), equalTo(1));
+ assertThat(findRepository(repositoriesResponse.repositories(), ""test-repo-2""), notNullValue());
+
+ logger.info(""--> delete repository test-repo-2"");
+ client.admin().cluster().prepareDeleteRepository(""test-repo-2"").get();
+ repositoriesResponse = client.admin().cluster().prepareGetRepositories().get();
+ assertThat(repositoriesResponse.repositories().size(), equalTo(0));
+ }
+
+"
+1," protected void parseParameters() {
+
+ parametersParsed = true;
+
+ Parameters parameters = coyoteRequest.getParameters();
+
+ // getCharacterEncoding() may have been overridden to search for
+ // hidden form field containing request encoding
+ String enc = getCharacterEncoding();
+
+ boolean useBodyEncodingForURI = connector.getUseBodyEncodingForURI();
+ if (enc != null) {
+ parameters.setEncoding(enc);
+ if (useBodyEncodingForURI) {
+ parameters.setQueryStringEncoding(enc);
+ }
+ } else {
+ parameters.setEncoding
+ (org.apache.coyote.Constants.DEFAULT_CHARACTER_ENCODING);
+ if (useBodyEncodingForURI) {
+ parameters.setQueryStringEncoding
+ (org.apache.coyote.Constants.DEFAULT_CHARACTER_ENCODING);
+ }
+ }
+
+ parameters.handleQueryParameters();
+
+ if (usingInputStream || usingReader)
+ return;
+
+ if (!getMethod().equalsIgnoreCase(""POST""))
+ return;
+
+ String contentType = getContentType();
+ if (contentType == null)
+ contentType = """";
+ int semicolon = contentType.indexOf(';');
+ if (semicolon >= 0) {
+ contentType = contentType.substring(0, semicolon).trim();
+ } else {
+ contentType = contentType.trim();
+ }
+ if (!(""application/x-www-form-urlencoded"".equals(contentType)))
+ return;
+
+ int len = getContentLength();
+
+ if (len > 0) {
+ int maxPostSize = connector.getMaxPostSize();
+ if ((maxPostSize > 0) && (len > maxPostSize)) {
+ context.getLogger().info
+ (sm.getString(""coyoteRequest.postTooLarge""));
+ throw new IllegalStateException(""Post too large"");
+ }
+ try {
+ byte[] formData = null;
+ if (len < CACHED_POST_LEN) {
+ if (postData == null)
+ postData = new byte[CACHED_POST_LEN];
+ formData = postData;
+ } else {
+ formData = new byte[len];
+ }
+ int actualLen = readPostBody(formData, len);
+ if (actualLen == len) {
+ parameters.processParameters(formData, 0, len);
+ }
+ } catch (Throwable t) {
+ context.getLogger().warn
+ (sm.getString(""coyoteRequest.parseParameters""), t);
+ }
+ } else if (""chunked"".equalsIgnoreCase(
+ coyoteRequest.getHeader(""transfer-encoding""))) {
+ byte[] formData = null;
+ try {
+ formData = readChunkedPostBody();
+ } catch (IOException e) {
+ // Client disconnect
+ if (context.getLogger().isDebugEnabled()) {
+ context.getLogger().debug(
+ sm.getString(""coyoteRequest.parseParameters""), e);
+ }
+ return;
+ }
+ if (formData != null) {
+ parameters.processParameters(formData, 0, formData.length);
+ }
+ }
+
+ }
+
+
+ /**
+ * Read post body in an array.
+ */
+"
+1," public void unzipping_stream_extracts_subset_of_files() throws IOException {
+ InputStream zip = urlToZip().openStream();
+ File toDir = temp.newFolder();
+
+ ZipUtils.unzip(zip, toDir, (ZipUtils.ZipEntryFilter)ze -> ze.getName().equals(""foo.txt""));
+ assertThat(toDir.listFiles()).containsOnly(new File(toDir, ""foo.txt""));
+ }
+
+"
+1," protected boolean addInputFilter(InputFilter[] inputFilters,
+ String encodingName) {
+ if (encodingName.equals(""identity"")) {
+ // Skip
+ } else if (encodingName.equals(""chunked"")) {
+ inputBuffer.addActiveFilter
+ (inputFilters[Constants.CHUNKED_FILTER]);
+ contentDelimitation = true;
+ } else {
+ for (int i = 2; i < inputFilters.length; i++) {
+ if (inputFilters[i].getEncodingName()
+ .toString().equals(encodingName)) {
+ inputBuffer.addActiveFilter(inputFilters[i]);
+ return true;
+ }
+ }
+ return false;
+ }
+ return true;
+ }
+
+
+ /**
+ * Specialized utility method: find a sequence of lower case bytes inside
+ * a ByteChunk.
+ */
+"
+1," public KeystoreInstance createKeystore(String name, char[] password, String keystoreType) throws KeystoreException {
+ File test = new File(directory, name);
+ if(test.exists()) {
+ throw new IllegalArgumentException(""Keystore already exists ""+test.getAbsolutePath()+""!"");
+ }
+ try {
+ KeyStore keystore = KeyStore.getInstance(keystoreType);
+ keystore.load(null, password);
+ OutputStream out = new BufferedOutputStream(new FileOutputStream(test));
+ keystore.store(out, password);
+ out.flush();
+ out.close();
+ return getKeystore(name, keystoreType);
+ } catch (KeyStoreException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ } catch (IOException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ } catch (NoSuchAlgorithmException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ } catch (CertificateException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ }
+ }
+
+"
+1," private ControllerInfo parseCInfoString(String cInfoString) {
+ Annotations annotation;
+
+ String[] config = cInfoString.split("","");
+ if (config.length == 2) {
+ String[] pair = config[1].split(""="");
+
+ if (pair.length == 2) {
+ annotation = DefaultAnnotations.builder()
+ .set(pair[0], pair[1]).build();
+ } else {
+ print(""Wrong format {}"", config[1]);
+ return null;
+ }
+
+ String[] data = config[0].split("":"");
+ String type = data[0];
+ IpAddress ip = IpAddress.valueOf(data[1]);
+ int port = Integer.parseInt(data[2]);
+
+ return new ControllerInfo(ip, port, type, annotation);
+ } else {
+ print(config[0]);
+ return new ControllerInfo(config[0]);
+ }
+ }
+"
+1," public void filterWithParameter() throws IOException, ServletException {
+ MockHttpServletRequest request = new MockHttpServletRequest(""POST"", ""/hotels"");
+ request.addParameter(""_method"", ""delete"");
+ MockHttpServletResponse response = new MockHttpServletResponse();
+
+ FilterChain filterChain = new FilterChain() {
+
+ @Override
+ public void doFilter(ServletRequest filterRequest,
+ ServletResponse filterResponse) throws IOException, ServletException {
+ assertEquals(""Invalid method"", ""DELETE"",
+ ((HttpServletRequest) filterRequest).getMethod());
+ }
+ };
+ filter.doFilter(request, response, filterChain);
+ }
+
+ @Test
+"
+1," public Authentication authenticate(Authentication authentication) throws AuthenticationException {
+ Assert.isInstanceOf(UsernamePasswordAuthenticationToken.class, authentication,
+ messages.getMessage(""LdapAuthenticationProvider.onlySupports"",
+ ""Only UsernamePasswordAuthenticationToken is supported""));
+
+ final UsernamePasswordAuthenticationToken userToken = (UsernamePasswordAuthenticationToken)authentication;
+
+ String username = userToken.getName();
+ String password = (String) authentication.getCredentials();
+
+ if (logger.isDebugEnabled()) {
+ logger.debug(""Processing authentication request for user: "" + username);
+ }
+
+ if (!StringUtils.hasLength(username)) {
+ throw new BadCredentialsException(messages.getMessage(""LdapAuthenticationProvider.emptyUsername"",
+ ""Empty Username""));
+ }
+
+ Assert.notNull(password, ""Null password was supplied in authentication token"");
+
+ DirContextOperations userData = doAuthentication(userToken);
+
+ UserDetails user = userDetailsContextMapper.mapUserFromContext(userData, authentication.getName(),
+ loadUserAuthorities(userData, authentication.getName(), (String)authentication.getCredentials()));
+
+ return createSuccessfulAuthentication(userToken, user);
+ }
+
+"
+1," public CsrfConfigurer ignoringAntMatchers(String... antPatterns) {
+ return new IgnoreCsrfProtectionRegistry().antMatchers(antPatterns).and();
+ }
+
+ @SuppressWarnings(""unchecked"")
+ @Override
+"
+1," public void commence(ServletRequest request, ServletResponse response,
+ AuthenticationException authException) throws IOException, ServletException {
+
+ HttpServletRequest hrequest = (HttpServletRequest)request;
+ HttpServletResponse hresponse = (HttpServletResponse)response;
+ FedizContext fedContext = federationConfig.getFedizContext();
+ LOG.debug(""Federation context: {}"", fedContext);
+
+ // Check to see if it is a metadata request
+ MetadataDocumentHandler mdHandler = new MetadataDocumentHandler(fedContext);
+ if (mdHandler.canHandleRequest(hrequest)) {
+ mdHandler.handleRequest(hrequest, hresponse);
+ return;
+ }
+
+ String redirectUrl = null;
+ try {
+ FedizProcessor wfProc =
+ FedizProcessorFactory.newFedizProcessor(fedContext.getProtocol());
+
+ RedirectionResponse redirectionResponse =
+ wfProc.createSignInRequest(hrequest, fedContext);
+ redirectUrl = redirectionResponse.getRedirectionURL();
+
+ if (redirectUrl == null) {
+ LOG.warn(""Failed to create SignInRequest."");
+ hresponse.sendError(
+ HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ""Failed to create SignInRequest."");
+ // Nothing to redirect to, so stop here rather than fall through to sendRedirect(null).
+ return;
+ }
+
+ Map<String, String> headers = redirectionResponse.getHeaders();
+ if (!headers.isEmpty()) {
+ for (Entry<String, String> entry : headers.entrySet()) {
+ hresponse.addHeader(entry.getKey(), entry.getValue());
+ }
+ }
+
+ } catch (ProcessingException ex) {
+ System.err.println(""Failed to create SignInRequest: "" + ex.getMessage());
+ LOG.warn(""Failed to create SignInRequest: "" + ex.getMessage());
+ hresponse.sendError(
+ HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ""Failed to create SignInRequest."");
+ }
+
+ preCommence(hrequest, hresponse);
+ if (LOG.isInfoEnabled()) {
+ LOG.info(""Redirecting to IDP: "" + redirectUrl);
+ }
+ hresponse.sendRedirect(redirectUrl);
+
+ }
+
+"
+1," public void testWithVariantRequestOnly() throws Exception {
+ params.put(I18nInterceptor.DEFAULT_REQUESTONLY_PARAMETER, ""fr_CA_xx"");
+ interceptor.intercept(mai);
+
+ assertNull(params.get(I18nInterceptor.DEFAULT_PARAMETER)); // should have been removed
+ assertNull(session.get(I18nInterceptor.DEFAULT_SESSION_ATTRIBUTE));
+
+ Locale variant = new Locale(""fr"", ""CA"", ""xx"");
+ Locale locale = mai.getInvocationContext().getLocale();
+ assertNotNull(locale); // should be stored here
+ assertEquals(variant, locale);
+ assertEquals(""xx"", locale.getVariant());
+ }
+
+ @Test
+"
+1," private T run(PrivilegedAction action) {
+ return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run();
+ }
+"
+1," public void execute(FunctionContext context) {
+ ResultSender resultSender = context.getResultSender();
+
+ Cache cache = context.getCache();
+ String memberNameOrId = context.getMemberName();
+
+ RegionFunctionArgs regionCreateArgs = (RegionFunctionArgs) context.getArguments();
+
+ if (regionCreateArgs.isSkipIfExists()) {
+ Region region = cache.getRegion(regionCreateArgs.getRegionPath());
+ if (region != null) {
+ resultSender.lastResult(new CliFunctionResult(memberNameOrId, true,
+ CliStrings.format(
+ CliStrings.CREATE_REGION__MSG__SKIPPING_0_REGION_PATH_1_ALREADY_EXISTS,
+ memberNameOrId, regionCreateArgs.getRegionPath())));
+ return;
+ }
+ }
+
+ try {
+ Region<?, ?> createdRegion = createRegion(cache, regionCreateArgs);
+ XmlEntity xmlEntity = new XmlEntity(CacheXml.REGION, ""name"", createdRegion.getName());
+ resultSender.lastResult(new CliFunctionResult(memberNameOrId, xmlEntity,
+ CliStrings.format(CliStrings.CREATE_REGION__MSG__REGION_0_CREATED_ON_1,
+ createdRegion.getFullPath(), memberNameOrId)));
+ } catch (IllegalStateException e) {
+ String exceptionMsg = e.getMessage();
+ String localizedString =
+ LocalizedStrings.DiskStore_IS_USED_IN_NONPERSISTENT_REGION.toLocalizedString();
+ if (localizedString.equals(e.getMessage())) {
+ exceptionMsg = exceptionMsg + "" ""
+ + CliStrings.format(CliStrings.CREATE_REGION__MSG__USE_ONE_OF_THESE_SHORTCUTS_0,
+ new Object[] {String.valueOf(RegionCommandsUtils.PERSISTENT_OVERFLOW_SHORTCUTS)});
+ }
+ resultSender.lastResult(handleException(memberNameOrId, exceptionMsg, null/* do not log */));
+ } catch (IllegalArgumentException e) {
+ resultSender.lastResult(handleException(memberNameOrId, e.getMessage(), e));
+ } catch (RegionExistsException e) {
+ String exceptionMsg =
+ CliStrings.format(CliStrings.CREATE_REGION__MSG__REGION_PATH_0_ALREADY_EXISTS_ON_1,
+ regionCreateArgs.getRegionPath(), memberNameOrId);
+ resultSender.lastResult(handleException(memberNameOrId, exceptionMsg, e));
+ } catch (Exception e) {
+ String exceptionMsg = e.getMessage();
+ if (exceptionMsg == null) {
+ exceptionMsg = CliUtil.stackTraceAsString(e);
+ }
+ resultSender.lastResult(handleException(memberNameOrId, exceptionMsg, e));
+ }
+ }
+
+"
+1," public static Document signMetaInfo(Crypto crypto, String keyAlias, String keyPassword,
+ InputStream metaInfo, String referenceID) throws Exception {
+ if (keyAlias == null || """".equals(keyAlias)) {
+ keyAlias = crypto.getDefaultX509Identifier();
+ }
+ X509Certificate cert = CertsUtils.getX509Certificate(crypto, keyAlias);
+// }
+
+/* public static ByteArrayOutputStream signMetaInfo(FederationContext config, InputStream metaInfo,
+ String referenceID)
+ throws Exception {
+
+ KeyManager keyManager = config.getSigningKey();
+ String keyAlias = keyManager.getKeyAlias();
+ String keypass = keyManager.getKeyPassword();
+
+ // in case we did not specify the key alias, we assume there is only one key in the keystore ,
+ // we use this key's alias as default.
+ if (keyAlias == null || """".equals(keyAlias)) {
+ //keyAlias = getDefaultX509Identifier(ks);
+ keyAlias = keyManager.getCrypto().getDefaultX509Identifier();
+ }
+ CryptoType cryptoType = new CryptoType(CryptoType.TYPE.ALIAS);
+ cryptoType.setAlias(keyAlias);
+ X509Certificate[] issuerCerts = keyManager.getCrypto().getX509Certificates(cryptoType);
+ if (issuerCerts == null || issuerCerts.length == 0) {
+ throw new ProcessingException(
+ ""No issuer certs were found to sign the metadata using issuer name: ""
+ + keyAlias);
+ }
+ X509Certificate cert = issuerCerts[0];
+*/
+ String signatureMethod = null;
+ if (""SHA1withDSA"".equals(cert.getSigAlgName())) {
+ signatureMethod = SignatureMethod.DSA_SHA1;
+ } else if (""SHA1withRSA"".equals(cert.getSigAlgName())) {
+ signatureMethod = SignatureMethod.RSA_SHA1;
+ } else if (""SHA256withRSA"".equals(cert.getSigAlgName())) {
+ signatureMethod = SignatureMethod.RSA_SHA1;
+ } else {
+ LOG.error(""Unsupported signature method: "" + cert.getSigAlgName());
+ throw new RuntimeException(""Unsupported signature method: "" + cert.getSigAlgName());
+ }
+
+ List transformList = new ArrayList();
+ transformList.add(XML_SIGNATURE_FACTORY.newTransform(Transform.ENVELOPED, (TransformParameterSpec)null));
+ transformList.add(XML_SIGNATURE_FACTORY.newCanonicalizationMethod(CanonicalizationMethod.EXCLUSIVE,
+ (C14NMethodParameterSpec)null));
+
+ // Create a Reference to the enveloped document (in this case,
+ // you are signing the whole document, so a URI of """" signifies
+ // that, and also specify the SHA1 digest algorithm and
+ // the ENVELOPED Transform.
+ Reference ref = XML_SIGNATURE_FACTORY.newReference(
+ ""#"" + referenceID,
+ XML_SIGNATURE_FACTORY.newDigestMethod(DigestMethod.SHA1, null),
+ transformList,
+ null, null);
+
+ // Create the SignedInfo.
+ SignedInfo si = XML_SIGNATURE_FACTORY.newSignedInfo(
+ XML_SIGNATURE_FACTORY.newCanonicalizationMethod(
+ CanonicalizationMethod.EXCLUSIVE, (C14NMethodParameterSpec)null),
+ XML_SIGNATURE_FACTORY.newSignatureMethod(
+ signatureMethod, null), Collections.singletonList(ref));
+
+ // step 2
+ // Load the KeyStore and get the signing key and certificate.
+
+ PrivateKey keyEntry = crypto.getPrivateKey(keyAlias, keyPassword);
+
+ // Create the KeyInfo containing the X509Data.
+ KeyInfoFactory kif = XML_SIGNATURE_FACTORY.getKeyInfoFactory();
+ List x509Content = new ArrayList();
+ x509Content.add(cert.getSubjectX500Principal().getName());
+ x509Content.add(cert);
+ X509Data xd = kif.newX509Data(x509Content);
+ KeyInfo ki = kif.newKeyInfo(Collections.singletonList(xd));
+
+ // step3
+ // Instantiate the document to be signed.
+ Document doc = DOC_BUILDER_FACTORY.newDocumentBuilder().parse(metaInfo);
+
+ // Create a DOMSignContext and specify the RSA PrivateKey and
+ // location of the resulting XMLSignature's parent element.
+ //DOMSignContext dsc = new DOMSignContext(keyEntry.getPrivateKey(), doc.getDocumentElement());
+ DOMSignContext dsc = new DOMSignContext(keyEntry, doc.getDocumentElement());
+ dsc.setIdAttributeNS(doc.getDocumentElement(), null, ""ID"");
+ dsc.setNextSibling(doc.getDocumentElement().getFirstChild());
+
+ // Create the XMLSignature, but don't sign it yet.
+ XMLSignature signature = XML_SIGNATURE_FACTORY.newXMLSignature(si, ki);
+
+ // Marshal, generate, and sign the enveloped signature.
+ signature.sign(dsc);
+
+ // step 4
+ // Output the resulting document.
+
+ return doc;
+ }
+
+"
+1," public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException {
+ int numDocs = scaledRandomIntBetween(100, 1000);
+ internalCluster().ensureAtLeastNumDataNodes(2);
+
+ assertAcked(prepareCreate(""test"").setSettings(ImmutableSettings.builder()
+ .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, ""0"") // no replicas for this test
+ .put(MergePolicyModule.MERGE_POLICY_TYPE_KEY, NoMergePolicyProvider.class)
+ .put(MockFSDirectoryService.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose
+ .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, true)
+ .put(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files
+ .put(""indices.recovery.concurrent_streams"", 10)
+ ));
+ ensureGreen();
+ IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
+ for (int i = 0; i < builders.length; i++) {
+ builders[i] = client().prepareIndex(""test"", ""type"").setSource(""field"", ""value"");
+ }
+ indexRandom(true, builders);
+ ensureGreen();
+ assertAllSuccessful(client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet());
+ // we have to flush at least once here since we don't corrupt the translog
+ CountResponse countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+
+ ShardRouting shardRouting = corruptRandomPrimaryFile(false);
+ // we don't corrupt segments.gen since S/R doesn't snapshot this file
+ // the other problem here why we can't corrupt segments.X files is that the snapshot flushes again before
+ // it snapshots and that will write a new segments.X+1 file
+ logger.info(""--> creating repository"");
+ assertAcked(client().admin().cluster().preparePutRepository(""test-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", newTempDir(LifecycleScope.SUITE).getAbsolutePath())
+ .put(""compress"", randomBoolean())
+ .put(""chunk_size"", randomIntBetween(100, 1000))));
+ logger.info(""--> snapshot"");
+ CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.PARTIAL));
+ logger.info(""failed during snapshot -- maybe SI file got corrupted"");
+ final List<File> files = listShardFiles(shardRouting);
+ File corruptedFile = null;
+ for (File file : files) {
+ if (file.getName().startsWith(""corrupted_"")) {
+ corruptedFile = file;
+ break;
+ }
+ }
+ assertThat(corruptedFile, notNullValue());
+ }
+
+ /**
+ * This test verifies that if we corrupt a replica, we can still get to green, even though
+ * listing its store fails. Note, we need to make sure that replicas are allocated on all data
+ * nodes, so that replica won't be sneaky and allocated on a node that doesn't have a corrupted
+ * replica.
+ */
+ @Test
+"
+1," public void parse(InputStream stream, ContentHandler ignore,
+ Metadata metadata, ParseContext context) throws IOException,
+ SAXException, TikaException {
+ //Test to see if we should avoid parsing
+ if (parserState.recursiveParserWrapperHandler.hasHitMaximumEmbeddedResources()) {
+ return;
+ }
+ // Work out what this thing is
+ String objectName = getResourceName(metadata, parserState);
+ String objectLocation = this.location + objectName;
+
+ metadata.add(AbstractRecursiveParserWrapperHandler.EMBEDDED_RESOURCE_PATH, objectLocation);
+
+
+ //get a fresh handler
+ ContentHandler localHandler = parserState.recursiveParserWrapperHandler.getNewContentHandler();
+ parserState.recursiveParserWrapperHandler.startEmbeddedDocument(localHandler, metadata);
+
+ Parser preContextParser = context.get(Parser.class);
+ context.set(Parser.class, new EmbeddedParserDecorator(getWrappedParser(), objectLocation, parserState));
+ long started = System.currentTimeMillis();
+ try {
+ super.parse(stream, localHandler, metadata, context);
+ } catch (SAXException e) {
+ boolean wlr = isWriteLimitReached(e);
+ if (wlr == true) {
+ metadata.add(WRITE_LIMIT_REACHED, ""true"");
+ } else {
+ if (catchEmbeddedExceptions) {
+ ParserUtils.recordParserFailure(this, e, metadata);
+ } else {
+ throw e;
+ }
+ }
+ } catch (TikaException e) {
+ if (catchEmbeddedExceptions) {
+ ParserUtils.recordParserFailure(this, e, metadata);
+ } else {
+ throw e;
+ }
+ } finally {
+ context.set(Parser.class, preContextParser);
+ long elapsedMillis = System.currentTimeMillis() - started;
+ metadata.set(RecursiveParserWrapperHandler.PARSE_TIME_MILLIS, Long.toString(elapsedMillis));
+ parserState.recursiveParserWrapperHandler.endEmbeddedDocument(localHandler, metadata);
+ }
+ }
+ }
+
+ /**
+ * This tracks the state of the parse of a single document.
+ * In future versions, this will allow the RecursiveParserWrapper to be thread safe.
+ */
+ private class ParserState {
+ private int unknownCount = 0;
+ private final AbstractRecursiveParserWrapperHandler recursiveParserWrapperHandler;
+ private ParserState(AbstractRecursiveParserWrapperHandler handler) {
+ this.recursiveParserWrapperHandler = handler;
+ }
+
+
+ }
+}
+"
+1," private boolean breakKeepAliveLoop(SocketWrapperBase> socketWrapper) {
+ openSocket = keepAlive;
+ // Do sendfile as needed: add socket to sendfile and end
+ if (sendfileData != null && !getErrorState().isError()) {
+ sendfileData.keepAlive = keepAlive;
+ switch (socketWrapper.processSendfile(sendfileData)) {
+ case DONE:
+ // If sendfile is complete, no need to break keep-alive loop
+ sendfileData = null;
+ return false;
+ case PENDING:
+ return true;
+ case ERROR:
+ // Write failed
+ if (log.isDebugEnabled()) {
+ log.debug(sm.getString(""http11processor.sendfile.error""));
+ }
+ setErrorState(ErrorState.CLOSE_CONNECTION_NOW, null);
+ return true;
+ }
+ }
+ return false;
+ }
+
+
+ @Override
+"
+1," public void execute(FunctionContext context) {
+ RegionFunctionContext rfc = (RegionFunctionContext) context;
+ Set<String> keys = (Set<String>) rfc.getFilter();
+
+ // Get local (primary) data for the context
+ Region primaryDataSet = PartitionRegionHelper.getLocalDataForContext(rfc);
+
+ if (this.cache.getLogger().fineEnabled()) {
+ StringBuilder builder = new StringBuilder();
+ builder.append(""Function "").append(ID).append("" received request to touch "")
+ .append(primaryDataSet.getFullPath()).append(""->"").append(keys);
+ this.cache.getLogger().fine(builder.toString());
+ }
+
+ // Retrieve each value to update the lastAccessedTime.
+ // Note: getAll is not supported on LocalDataSet.
+ for (String key : keys) {
+ primaryDataSet.get(key);
+ }
+
+ // Return result to get around NPE in LocalResultCollectorImpl
+ context.getResultSender().lastResult(true);
+ }
+
+"
+1," private static void unzip(final ZipFile zip, final Path targetDir) throws IOException {
+ final Enumeration<? extends ZipEntry> entries = zip.entries();
+ while (entries.hasMoreElements()) {
+ final ZipEntry entry = entries.nextElement();
+ final String name = entry.getName();
+ final Path current = targetDir.resolve(name);
+ if (entry.isDirectory()) {
+ if (!Files.exists(current)) {
+ Files.createDirectories(current);
+ }
+ } else {
+ if (Files.notExists(current.getParent())) {
+ Files.createDirectories(current.getParent());
+ }
+ try (final InputStream eis = zip.getInputStream(entry)) {
+ Files.copy(eis, current);
+ }
+ }
+ try {
+ Files.getFileAttributeView(current, BasicFileAttributeView.class).setTimes(entry.getLastModifiedTime(), entry.getLastAccessTime(), entry.getCreationTime());
+ } catch (IOException e) {
+ //ignore, if we cannot set it, world will not end
+ }
+ }
+ }
+
+"
+1," private void updateGraphFromRequest(ActionRequest actionRequest, Graph graph) {
+ graph.setGraphName1(actionRequest.getParameter(""name""));
+ graph.setDescription(actionRequest.getParameter(""description""));
+ graph.setXlabel(actionRequest.getParameter(""xlabel""));
+ graph.setYlabel(actionRequest.getParameter(""ylabel""));
+ graph.setTimeFrame(Integer.parseInt(actionRequest.getParameter(""timeframe"")));
+ graph.setMBeanName(actionRequest.getParameter(""mbean""));
+ graph.setDataName1(actionRequest.getParameter(""dataname1""));
+ graph.setData1operation(actionRequest.getParameter(""data1operation"").charAt(0));
+
+ graph.setOperation(actionRequest.getParameter(""operation""));
+ if (graph.getOperation().equals(""other"")) {
+ graph.setOperation(actionRequest.getParameter(""othermath""));
+ }
+
+ graph.setShowArchive(actionRequest.getParameter(""showArchive"") != null
+ && actionRequest.getParameter(""showArchive"").equals(""on""));
+
+ graph.setDataName2(actionRequest.getParameter(""dataname2""));
+ graph.setData2operation(actionRequest.getParameter(""data2operation"") == null? 'A': actionRequest.getParameter(""data2operation"").charAt(0));
+ }
+
+"
+1," public BeanDefinition parse(Element elt, ParserContext pc) {
+ MatcherType matcherType = MatcherType.fromElement(elt);
+ String path = elt.getAttribute(HttpSecurityBeanDefinitionParser.ATT_PATH_PATTERN);
+ String requestMatcher = elt.getAttribute(ATT_REQUEST_MATCHER_REF);
+ String filters = elt.getAttribute(HttpSecurityBeanDefinitionParser.ATT_FILTERS);
+
+ BeanDefinitionBuilder builder = BeanDefinitionBuilder
+ .rootBeanDefinition(DefaultSecurityFilterChain.class);
+
+ if (StringUtils.hasText(path)) {
+ Assert.isTrue(!StringUtils.hasText(requestMatcher), """");
+ builder.addConstructorArgValue(matcherType.createMatcher(path, null));
+ }
+ else {
+ Assert.isTrue(StringUtils.hasText(requestMatcher), """");
+ builder.addConstructorArgReference(requestMatcher);
+ }
+
+ if (filters.equals(HttpSecurityBeanDefinitionParser.OPT_FILTERS_NONE)) {
+ builder.addConstructorArgValue(Collections.EMPTY_LIST);
+ }
+ else {
+ String[] filterBeanNames = StringUtils.tokenizeToStringArray(filters, "","");
+ ManagedList filterChain = new ManagedList(
+ filterBeanNames.length);
+
+ for (String name : filterBeanNames) {
+ filterChain.add(new RuntimeBeanReference(name));
+ }
+
+ builder.addConstructorArgValue(filterChain);
+ }
+
+ return builder.getBeanDefinition();
+ }
+"
+1," protected Principal authenticate(Connection dbConnection,
+ String username,
+ String credentials) {
+ // No user or no credentials
+ // Can't possibly authenticate, don't bother the database then
+ if (username == null || credentials == null) {
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""dataSourceRealm.authenticateFailure"",
+ username));
+ return null;
+ }
+
+ // Look up the user's credentials
+ String dbCredentials = getPassword(dbConnection, username);
+
+ if(dbCredentials == null) {
+ // User was not found in the database.
+
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""dataSourceRealm.authenticateFailure"",
+ username));
+ return null;
+ }
+
+ // Validate the user's credentials
+ boolean validated = getCredentialHandler().matches(credentials, dbCredentials);
+
+ if (validated) {
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""dataSourceRealm.authenticateSuccess"",
+ username));
+ } else {
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""dataSourceRealm.authenticateFailure"",
+ username));
+ return null;
+ }
+
+ ArrayList list = getRoles(dbConnection, username);
+
+ // Create and return a suitable Principal for this user
+ return new GenericPrincipal(username, credentials, list);
+ }
+
+
+ /**
+ * Close the specified database connection.
+ *
+ * @param dbConnection The connection to be closed
+ */
+"
+1," public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder holder,
+ ParserContext parserContext) {
+ BeanDefinition filterChainProxy = holder.getBeanDefinition();
+
+ ManagedList securityFilterChains = new ManagedList();
+ Element elt = (Element) node;
+
+ MatcherType matcherType = MatcherType.fromElement(elt);
+
+ List<Element> filterChainElts = DomUtils.getChildElementsByTagName(elt,
+ Elements.FILTER_CHAIN);
+
+ for (Element chain : filterChainElts) {
+ String path = chain
+ .getAttribute(HttpSecurityBeanDefinitionParser.ATT_PATH_PATTERN);
+ String filters = chain
+ .getAttribute(HttpSecurityBeanDefinitionParser.ATT_FILTERS);
+
+ if (!StringUtils.hasText(path)) {
+ parserContext.getReaderContext().error(
+ ""The attribute '""
+ + HttpSecurityBeanDefinitionParser.ATT_PATH_PATTERN
+ + ""' must not be empty"", elt);
+ }
+
+ if (!StringUtils.hasText(filters)) {
+ parserContext.getReaderContext().error(
+ ""The attribute '"" + HttpSecurityBeanDefinitionParser.ATT_FILTERS
+ + ""'must not be empty"", elt);
+ }
+
+ BeanDefinition matcher = matcherType.createMatcher(path, null);
+
+ if (filters.equals(HttpSecurityBeanDefinitionParser.OPT_FILTERS_NONE)) {
+ securityFilterChains.add(createSecurityFilterChain(matcher,
+ new ManagedList(0)));
+ }
+ else {
+ String[] filterBeanNames = StringUtils
+ .tokenizeToStringArray(filters, "","");
+ ManagedList filterChain = new ManagedList(filterBeanNames.length);
+
+ for (String name : filterBeanNames) {
+ filterChain.add(new RuntimeBeanReference(name));
+ }
+
+ securityFilterChains.add(createSecurityFilterChain(matcher, filterChain));
+ }
+ }
+
+ filterChainProxy.getConstructorArgumentValues().addGenericArgumentValue(
+ securityFilterChains);
+
+ return holder;
+ }
+
+"
+1," public XObject execute(XPathContext xctxt) throws javax.xml.transform.TransformerException
+ {
+
+ String fullName = m_arg0.execute(xctxt).str();
+ int indexOfNSSep = fullName.indexOf(':');
+ String result;
+ String propName = """";
+
+ // List of properties where the name of the
+ // property argument is to be looked for.
+ Properties xsltInfo = new Properties();
+
+ loadPropertyFile(XSLT_PROPERTIES, xsltInfo);
+
+ if (indexOfNSSep > 0)
+ {
+ String prefix = (indexOfNSSep >= 0)
+ ? fullName.substring(0, indexOfNSSep) : """";
+ String namespace;
+
+ namespace = xctxt.getNamespaceContext().getNamespaceForPrefix(prefix);
+ propName = (indexOfNSSep < 0)
+ ? fullName : fullName.substring(indexOfNSSep + 1);
+
+ if (namespace.startsWith(""http://www.w3.org/XSL/Transform"")
+ || namespace.equals(""http://www.w3.org/1999/XSL/Transform""))
+ {
+ result = xsltInfo.getProperty(propName);
+
+ if (null == result)
+ {
+ warn(xctxt, XPATHErrorResources.WG_PROPERTY_NOT_SUPPORTED,
+ new Object[]{ fullName }); //""XSL Property not supported: ""+fullName);
+
+ return XString.EMPTYSTRING;
+ }
+ }
+ else
+ {
+ warn(xctxt, XPATHErrorResources.WG_DONT_DO_ANYTHING_WITH_NS,
+ new Object[]{ namespace,
+ fullName }); //""Don't currently do anything with namespace ""+namespace+"" in property: ""+fullName);
+
+ try
+ {
+ result = System.getProperty(propName);
+
+ if (null == result)
+ {
+
+ // result = System.getenv(propName);
+ return XString.EMPTYSTRING;
+ }
+ }
+ catch (SecurityException se)
+ {
+ warn(xctxt, XPATHErrorResources.WG_SECURITY_EXCEPTION,
+ new Object[]{ fullName }); //""SecurityException when trying to access XSL system property: ""+fullName);
+
+ return XString.EMPTYSTRING;
+ }
+ }
+ }
+ else
+ {
+ try
+ {
+ result = System.getProperty(fullName);
+
+ if (null == result)
+ {
+
+ // result = System.getenv(fullName);
+ return XString.EMPTYSTRING;
+ }
+ }
+ catch (SecurityException se)
+ {
+ warn(xctxt, XPATHErrorResources.WG_SECURITY_EXCEPTION,
+ new Object[]{ fullName }); //""SecurityException when trying to access XSL system property: ""+fullName);
+
+ return XString.EMPTYSTRING;
+ }
+ }
+
+ if (propName.equals(""version"") && result.length() > 0)
+ {
+ try
+ {
+ // Needs to return the version number of the spec we conform to.
+ return new XString(""1.0"");
+ }
+ catch (Exception ex)
+ {
+ return new XString(result);
+ }
+ }
+ else
+ return new XString(result);
+ }
+
+ /**
+ * Retrieve a property bundle from a specified file
+ *
+ * @param file The string name of the property file. The name
+ * should already be fully qualified as path/filename
+ * @param target The target property bag the file will be placed into.
+ */
+"
+1," private T run(PrivilegedAction action) {
+ return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run();
+ }
+"
+1," protected Locale getLocaleFromParam(Object requestedLocale) {
+ Locale locale = null;
+ if (requestedLocale != null) {
+ locale = (requestedLocale instanceof Locale) ?
+ (Locale) requestedLocale :
+ LocalizedTextUtil.localeFromString(requestedLocale.toString(), null);
+ if (locale != null) {
+ LOG.debug(""Applied request locale: {}"", locale);
+ }
+ }
+ return locale;
+ }
+
+ /**
+ * Reads the locale from the session, and if not found from the
+ * current invocation (=browser)
+ *
+ * @param invocation the current invocation
+ * @param session the current session
+ * @return the read locale
+ */
+"
+1," public static HierarchicalConfiguration loadXml(InputStream xmlStream) {
+ XMLConfiguration cfg = new XMLConfiguration();
+ try {
+ cfg.load(xmlStream);
+ return cfg;
+ } catch (ConfigurationException e) {
+ throw new IllegalArgumentException(""Cannot load xml from Stream"", e);
+ }
+ }
+
+"
+1," private void multiByteReadConsistentlyReturnsMinusOneAtEof(File file) throws Exception {
+ byte[] buf = new byte[2];
+ try (FileInputStream in = new FileInputStream(file);
+ ZipArchiveInputStream archive = new ZipArchiveInputStream(in)) {
+ ArchiveEntry e = archive.getNextEntry();
+ IOUtils.toByteArray(archive);
+ assertEquals(-1, archive.read(buf));
+ assertEquals(-1, archive.read(buf));
+ }
+ }
+
+"
+1," public void testRead7ZipMultiVolumeArchiveForStream() throws IOException {
+
+ final FileInputStream archive =
+ new FileInputStream(getFile(""apache-maven-2.2.1.zip.001""));
+ ZipArchiveInputStream zi = null;
+ try {
+ zi = new ZipArchiveInputStream(archive,null,false);
+
+ // these are the entries that are supposed to be processed
+ // correctly without any problems
+ for (final String element : ENTRIES) {
+ assertEquals(element, zi.getNextEntry().getName());
+ }
+
+ // this is the last entry that is truncated
+ final ArchiveEntry lastEntry = zi.getNextEntry();
+ assertEquals(LAST_ENTRY_NAME, lastEntry.getName());
+ final byte [] buffer = new byte [4096];
+
+ // before the fix, we'd get 0 bytes on this read and all
+ // subsequent reads, so a client application might enter
+ // an infinite loop; after the fix, we should get an
+ // exception
+ try {
+ while (zi.read(buffer) > 0) { }
+ fail(""shouldn't be able to read from truncated entry"");
+ } catch (final IOException e) {
+ assertEquals(""Truncated ZIP file"", e.getMessage());
+ }
+
+ // and now we get another entry, which should also yield
+ // an exception
+ try {
+ zi.getNextEntry();
+ fail(""shouldn't be able to read another entry from truncated""
+ + "" file"");
+ } catch (final IOException e) {
+ // this is to be expected
+ }
+ } finally {
+ if (zi != null) {
+ zi.close();
+ }
+ }
+ }
+
+ @Test(expected=IOException.class)
+"
+1," void setByteChunk( ByteChunk mb ) {
+ initialized = (mb!=null);
+ bc = mb;
+ }
+
+"
+1," public void testUpdate() throws Exception
+ {
+ String xml =
+ """" +
+ "" "" +
+ "" "" +
+ "" "" +
+ "" "" +
+ "" "" +
+ "" "" +
+ "" "" +
+ "" "";
+
+ Map args = new HashMap();
+ args.put(CommonParams.TR, ""xsl-update-handler-test.xsl"");
+
+ SolrCore core = h.getCore();
+ LocalSolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
+ ArrayList streams = new ArrayList();
+ streams.add(new ContentStreamBase.StringStream(xml));
+ req.setContentStreams(streams);
+ SolrQueryResponse rsp = new SolrQueryResponse();
+ UpdateRequestHandler handler = new UpdateRequestHandler();
+ handler.init(new NamedList());
+ handler.handleRequestBody(req, rsp);
+ StringWriter sw = new StringWriter(32000);
+ QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
+ responseWriter.write(sw,req,rsp);
+ req.close();
+ String response = sw.toString();
+ assertU(response);
+ assertU(commit());
+
+ assertQ(""test document was correctly committed"", req(""q"",""*:*"")
+ , ""//result[@numFound='1']""
+ , ""//int[@name='id'][.='12345']""
+ );
+ }
+"
+1," protected Container createBootstrapContainer(List providers) {
+ ContainerBuilder builder = new ContainerBuilder();
+ boolean fmFactoryRegistered = false;
+ for (ContainerProvider provider : providers) {
+ if (provider instanceof FileManagerProvider) {
+ provider.register(builder, null);
+ }
+ if (provider instanceof FileManagerFactoryProvider) {
+ provider.register(builder, null);
+ fmFactoryRegistered = true;
+ }
+ }
+ builder.factory(ObjectFactory.class, Scope.SINGLETON);
+ builder.factory(FileManager.class, ""system"", DefaultFileManager.class, Scope.SINGLETON);
+ if (!fmFactoryRegistered) {
+ builder.factory(FileManagerFactory.class, DefaultFileManagerFactory.class, Scope.SINGLETON);
+ }
+ builder.factory(ReflectionProvider.class, OgnlReflectionProvider.class, Scope.SINGLETON);
+ builder.factory(ValueStackFactory.class, OgnlValueStackFactory.class, Scope.SINGLETON);
+
+ builder.factory(XWorkConverter.class, Scope.SINGLETON);
+ builder.factory(ConversionPropertiesProcessor.class, DefaultConversionPropertiesProcessor.class, Scope.SINGLETON);
+ builder.factory(ConversionFileProcessor.class, DefaultConversionFileProcessor.class, Scope.SINGLETON);
+ builder.factory(ConversionAnnotationProcessor.class, DefaultConversionAnnotationProcessor.class, Scope.SINGLETON);
+ builder.factory(TypeConverterCreator.class, DefaultTypeConverterCreator.class, Scope.SINGLETON);
+ builder.factory(TypeConverterHolder.class, DefaultTypeConverterHolder.class, Scope.SINGLETON);
+
+ builder.factory(XWorkBasicConverter.class, Scope.SINGLETON);
+ builder.factory(TypeConverter.class, XWorkConstants.COLLECTION_CONVERTER, CollectionConverter.class, Scope.SINGLETON);
+ builder.factory(TypeConverter.class, XWorkConstants.ARRAY_CONVERTER, ArrayConverter.class, Scope.SINGLETON);
+ builder.factory(TypeConverter.class, XWorkConstants.DATE_CONVERTER, DateConverter.class, Scope.SINGLETON);
+ builder.factory(TypeConverter.class, XWorkConstants.NUMBER_CONVERTER, NumberConverter.class, Scope.SINGLETON);
+ builder.factory(TypeConverter.class, XWorkConstants.STRING_CONVERTER, StringConverter.class, Scope.SINGLETON);
+ builder.factory(TextProvider.class, ""system"", DefaultTextProvider.class, Scope.SINGLETON);
+ builder.factory(ObjectTypeDeterminer.class, DefaultObjectTypeDeterminer.class, Scope.SINGLETON);
+ builder.factory(PropertyAccessor.class, CompoundRoot.class.getName(), CompoundRootAccessor.class, Scope.SINGLETON);
+ builder.factory(OgnlUtil.class, Scope.SINGLETON);
+ builder.constant(XWorkConstants.DEV_MODE, ""false"");
+ builder.constant(XWorkConstants.LOG_MISSING_PROPERTIES, ""false"");
+ builder.constant(XWorkConstants.RELOAD_XML_CONFIGURATION, ""false"");
+ return builder.create(true);
+ }
+
+ /**
+ * This builds the internal runtime configuration used by Xwork for finding and configuring Actions from the
+ * programmatic configuration data structures. All of the old runtime configuration will be discarded and rebuilt.
+ *
+ *
+ * It basically flattens the data structures to make the information easier to access. It will take
+ * an {@link ActionConfig} and combine its data with all inherited data. For example, if the {@link ActionConfig}
+ * is in a package that contains a global result and it also contains a result, the resulting {@link ActionConfig}
+ * will have two results.
+ */
+"
+1," protected void initOther() throws ServletException {
+ PropertyUtils.addBeanIntrospector(
+ SuppressPropertiesBeanIntrospector.SUPPRESS_CLASS);
+ PropertyUtils.clearDescriptors();
+
+ String value = null;
+ value = getServletConfig().getInitParameter(""config"");
+ if (value != null) {
+ config = value;
+ }
+
+ // Backwards compatibility for form beans of Java wrapper classes
+ // Set to true for strict Struts 1.0 compatibility
+ value = getServletConfig().getInitParameter(""convertNull"");
+ if (""true"".equalsIgnoreCase(value)
+ || ""yes"".equalsIgnoreCase(value)
+ || ""on"".equalsIgnoreCase(value)
+ || ""y"".equalsIgnoreCase(value)
+ || ""1"".equalsIgnoreCase(value)) {
+
+ convertNull = true;
+ }
+
+ if (convertNull) {
+ ConvertUtils.deregister();
+ ConvertUtils.register(new BigDecimalConverter(null), BigDecimal.class);
+ ConvertUtils.register(new BigIntegerConverter(null), BigInteger.class);
+ ConvertUtils.register(new BooleanConverter(null), Boolean.class);
+ ConvertUtils.register(new ByteConverter(null), Byte.class);
+ ConvertUtils.register(new CharacterConverter(null), Character.class);
+ ConvertUtils.register(new DoubleConverter(null), Double.class);
+ ConvertUtils.register(new FloatConverter(null), Float.class);
+ ConvertUtils.register(new IntegerConverter(null), Integer.class);
+ ConvertUtils.register(new LongConverter(null), Long.class);
+ ConvertUtils.register(new ShortConverter(null), Short.class);
+ }
+
+ }
+
+
+ /**
+ *
+ * Initialize the servlet mapping under which our controller servlet
+ * is being accessed. This will be used in the &lt;html:form&gt;
+ * tag to generate correct destination URLs for form submissions.
+ *
+ * @throws ServletException if error happens while scanning web.xml
+ */
+"
+1," private void parseUsingSAX(InputStream is) throws IOException, ParserConfigurationException, SAXException, SAXNotRecognizedException, SAXNotSupportedException
+ {
+ // Invoke the SAX XML parser on the input.
+ SAXParserFactory spf = SAXParserFactory.newInstance();
+
+ // Disable external entity resolving
+ spf.setFeature(""http://xml.org/sax/features/external-general-entities"", false);
+ spf.setFeature(""http://xml.org/sax/features/external-parameter-entities"", false);
+
+ SAXParser sp = spf.newSAXParser();
+ XMLReader xr = sp.getXMLReader();
+
+ SAXHandler handler = new SAXHandler();
+ xr.setContentHandler(handler);
+ xr.setProperty(""http://xml.org/sax/properties/lexical-handler"", handler);
+
+ xr.parse(new InputSource(is));
+ }
+
+
+"
+1," private ManagedMap parseInterceptUrlsForChannelSecurity() {
+
+ ManagedMap channelRequestMap = new ManagedMap();
+
+ for (Element urlElt : interceptUrls) {
+ String path = urlElt.getAttribute(ATT_PATH_PATTERN);
+ String method = urlElt.getAttribute(ATT_HTTP_METHOD);
+
+ if (!StringUtils.hasText(path)) {
+ pc.getReaderContext().error(""pattern attribute cannot be empty or null"",
+ urlElt);
+ }
+
+ String requiredChannel = urlElt.getAttribute(ATT_REQUIRES_CHANNEL);
+
+ if (StringUtils.hasText(requiredChannel)) {
+ BeanDefinition matcher = matcherType.createMatcher(path, method);
+
+ RootBeanDefinition channelAttributes = new RootBeanDefinition(
+ ChannelAttributeFactory.class);
+ channelAttributes.getConstructorArgumentValues().addGenericArgumentValue(
+ requiredChannel);
+ channelAttributes.setFactoryMethodName(""createChannelAttributes"");
+
+ channelRequestMap.put(matcher, channelAttributes);
+ }
+ }
+
+ return channelRequestMap;
+ }
+
+"
+1," public void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws ServletException,
+ IOException,
+ InterruptedException {
+ if (logger.isDebugEnabled()) {
+ logger.debug(""submit {}"", req.toString());
+ }
+ JSONObject form = req.getSubmittedForm();
+ PluginConfig pluginConfig = PluginImpl.getPluginConfig_();
+ if (pluginConfig != null) {
+ pluginConfig.setValues(form);
+ PluginImpl.save_();
+ GerritSendCommandQueue.configure(pluginConfig);
+ }
+ //TODO reconfigure the incoming worker threads as well
+
+ rsp.sendRedirect(""."");
+ }
+
+"
+1," public Document getMetaData(
+ HttpServletRequest request, FedizContext config
+ ) throws ProcessingException {
+
+ try {
+ ByteArrayOutputStream bout = new ByteArrayOutputStream(4096);
+ Writer streamWriter = new OutputStreamWriter(bout, ""UTF-8"");
+ XMLStreamWriter writer = XML_OUTPUT_FACTORY.createXMLStreamWriter(streamWriter);
+
+ Protocol protocol = config.getProtocol();
+
+ writer.writeStartDocument(""UTF-8"", ""1.0"");
+
+ String referenceID = IDGenerator.generateID(""_"");
+ writer.writeStartElement(""md"", ""EntityDescriptor"", SAML2_METADATA_NS);
+ writer.writeAttribute(""ID"", referenceID);
+
+ String serviceURL = protocol.getApplicationServiceURL();
+ if (serviceURL == null) {
+ serviceURL = extractFullContextPath(request);
+ }
+
+ writer.writeAttribute(""entityID"", serviceURL);
+
+ writer.writeNamespace(""md"", SAML2_METADATA_NS);
+ writer.writeNamespace(""fed"", WS_FEDERATION_NS);
+ writer.writeNamespace(""wsa"", WS_ADDRESSING_NS);
+ writer.writeNamespace(""auth"", WS_FEDERATION_NS);
+ writer.writeNamespace(""xsi"", SCHEMA_INSTANCE_NS);
+
+ if (protocol instanceof FederationProtocol) {
+ writeFederationMetadata(writer, config, serviceURL);
+ } else if (protocol instanceof SAMLProtocol) {
+ writeSAMLMetadata(writer, request, config, serviceURL);
+ }
+
+ writer.writeEndElement(); // EntityDescriptor
+
+ writer.writeEndDocument();
+
+ streamWriter.flush();
+ bout.flush();
+ //
+
+ if (LOG.isDebugEnabled()) {
+ String out = new String(bout.toByteArray());
+ LOG.debug(""***************** unsigned ****************"");
+ LOG.debug(out);
+ LOG.debug(""***************** unsigned ****************"");
+ }
+
+ InputStream is = new ByteArrayInputStream(bout.toByteArray());
+
+ boolean hasSigningKey = false;
+ try {
+ if (config.getSigningKey().getCrypto() != null) {
+ hasSigningKey = true;
+ }
+ } catch (Exception ex) {
+ LOG.info(""No signingKey element found in config: "" + ex.getMessage());
+ }
+ if (hasSigningKey) {
+ Document result = SignatureUtils.signMetaInfo(
+ config.getSigningKey().getCrypto(), config.getSigningKey().getKeyAlias(), config.getSigningKey().getKeyPassword(), is, referenceID);
+ if (result != null) {
+ return result;
+ } else {
+ throw new ProcessingException(""Failed to sign the metadata document: result=null"");
+ }
+ }
+ return DOMUtils.readXml(is);
+ } catch (ProcessingException e) {
+ throw e;
+ } catch (Exception e) {
+ LOG.error(""Error creating service metadata information "", e);
+ throw new ProcessingException(""Error creating service metadata information: "" + e.getMessage());
+ }
+
+ }
+
+"
+1," public void testMavenTriggerEvenWhenUnstable() throws Exception {
+ doMavenTriggerTest(true);
+ }
+"
+1," boolean isTransferException();
+
+ /**
+ * The status codes which are considered a success response. The values are inclusive. The range must be defined as from-to with the dash included.
+ *
+ * The default range is 200-299
+ */
+"
+1," public Object instantiate(Class type, Configuration conf, boolean fatal) {
+ Object obj = newInstance(_name, type, conf, fatal);
+ Configurations.configureInstance(obj, conf, _props,
+ (fatal) ? getProperty() : null);
+ if (_singleton)
+ set(obj, true);
+ return obj;
+ }
+
+"
+1," public ScimGroup delete(String id, int version) throws ScimResourceNotFoundException {
+ ScimGroup group = retrieve(id);
+ membershipManager.removeMembersByGroupId(id);
+ externalGroupMappingManager.unmapAll(id);
+ int deleted;
+ if (version > 0) {
+ deleted = jdbcTemplate.update(DELETE_GROUP_SQL + "" and version=?;"", id, IdentityZoneHolder.get().getId(),version);
+ } else {
+ deleted = jdbcTemplate.update(DELETE_GROUP_SQL, id, IdentityZoneHolder.get().getId());
+ }
+ if (deleted != 1) {
+ throw new IncorrectResultSizeDataAccessException(1, deleted);
+ }
+ return group;
+ }
+
+"
+1," public Authentication attemptAuthentication(HttpServletRequest request) throws AuthenticationException {
+
+ SecurityContext context = SecurityContextHolder.getContext();
+ if (context != null) {
+ Authentication authentication = context.getAuthentication();
+ if (authentication instanceof FederationAuthenticationToken) {
+ // If we reach this point then the token must be expired
+ throw new ExpiredTokenException(""Token is expired"");
+ }
+ }
+
+ String wa = request.getParameter(FederationConstants.PARAM_ACTION);
+ String responseToken = getResponseToken(request);
+ FedizRequest wfReq = new FedizRequest();
+ wfReq.setAction(wa);
+ wfReq.setResponseToken(responseToken);
+ wfReq.setState(request.getParameter(SAMLSSOConstants.RELAY_STATE));
+ wfReq.setRequest(request);
+
+ X509Certificate certs[] =
+ (X509Certificate[])request.getAttribute(""javax.servlet.request.X509Certificate"");
+ wfReq.setCerts(certs);
+
+ final UsernamePasswordAuthenticationToken authRequest = new UsernamePasswordAuthenticationToken(null, wfReq);
+
+ authRequest.setDetails(authenticationDetailsSource.buildDetails(request));
+
+ return this.getAuthenticationManager().authenticate(authRequest);
+ }
+
+ @Override
+"
+1," public void testPrototypeFactoryPublicSerialization() throws Exception {
+ final Integer proto = Integer.valueOf(9);
+ final Factory<Integer> factory = FactoryUtils.prototypeFactory(proto);
+ assertNotNull(factory);
+ final Integer created = factory.create();
+ assertTrue(proto != created);
+ assertEquals(proto, created);
+
+ // check serialisation works
+ final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ final ObjectOutputStream out = new ObjectOutputStream(buffer);
+ out.writeObject(factory);
+ out.close();
+ final ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()));
+ in.readObject();
+ in.close();
+ }
+
+ @Test
+"
+1," protected void configure(HttpSecurity http) throws Exception {
+ logger.debug(""Using default configure(HttpSecurity). If subclassed this will potentially override subclass configure(HttpSecurity)."");
+
+ http
+ .authorizeRequests()
+ .anyRequest().authenticated()
+ .and()
+ .formLogin().and()
+ .httpBasic();
+ }
+ // @formatter:on
+
+ @Autowired
+"
+1," public SocketState process(SocketWrapper socket)
+ throws IOException {
+ RequestInfo rp = request.getRequestProcessor();
+ rp.setStage(org.apache.coyote.Constants.STAGE_PARSE);
+
+ // Setting up the socket
+ this.socket = socket;
+ input = socket.getSocket().getInputStream();
+ output = socket.getSocket().getOutputStream();
+ int soTimeout = -1;
+ if (keepAliveTimeout > 0) {
+ soTimeout = socket.getSocket().getSoTimeout();
+ }
+
+ // Error flag
+ error = false;
+
+ while (!error && !endpoint.isPaused()) {
+
+ // Parsing the request header
+ try {
+ // Set keep alive timeout if enabled
+ if (keepAliveTimeout > 0) {
+ socket.getSocket().setSoTimeout(keepAliveTimeout);
+ }
+ // Get first message of the request
+ if (!readMessage(requestHeaderMessage)) {
+ // This means a connection timeout
+ break;
+ }
+ // Set back timeout if keep alive timeout is enabled
+ if (keepAliveTimeout > 0) {
+ socket.getSocket().setSoTimeout(soTimeout);
+ }
+ // Check message type, process right away and break if
+ // not regular request processing
+ int type = requestHeaderMessage.getByte();
+ if (type == Constants.JK_AJP13_CPING_REQUEST) {
+ try {
+ output.write(pongMessageArray);
+ } catch (IOException e) {
+ error = true;
+ }
+ continue;
+ } else if(type != Constants.JK_AJP13_FORWARD_REQUEST) {
+ // Usually the servlet didn't read the previous request body
+ if(log.isDebugEnabled()) {
+ log.debug(""Unexpected message: ""+type);
+ }
+ continue;
+ }
+
+ request.setStartTime(System.currentTimeMillis());
+ } catch (IOException e) {
+ error = true;
+ break;
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.debug(sm.getString(""ajpprocessor.header.error""), t);
+ // 400 - Bad Request
+ response.setStatus(400);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+
+ if (!error) {
+ // Setting up filters, and parse some request headers
+ rp.setStage(org.apache.coyote.Constants.STAGE_PREPARE);
+ try {
+ prepareRequest();
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.debug(sm.getString(""ajpprocessor.request.prepare""), t);
+ // 400 - Bad Request
+ response.setStatus(400);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+ }
+
+ if (endpoint.isPaused()) {
+ // 503 - Service unavailable
+ response.setStatus(503);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+
+ // Process the request in the adapter
+ if (!error) {
+ try {
+ rp.setStage(org.apache.coyote.Constants.STAGE_SERVICE);
+ adapter.service(request, response);
+ } catch (InterruptedIOException e) {
+ error = true;
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.error(sm.getString(""ajpprocessor.request.process""), t);
+ // 500 - Internal Server Error
+ response.setStatus(500);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+ }
+
+ if (isAsync() && !error) {
+ break;
+ }
+
+ // Finish the response if not done yet
+ if (!finished) {
+ try {
+ finish();
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ error = true;
+ }
+ }
+
+ // If there was an error, make sure the request is counted as
+ // an error, and update the statistics counter
+ if (error) {
+ response.setStatus(500);
+ }
+ request.updateCounters();
+
+ rp.setStage(org.apache.coyote.Constants.STAGE_KEEPALIVE);
+ recycle(false);
+ }
+
+ rp.setStage(org.apache.coyote.Constants.STAGE_ENDED);
+
+ if (isAsync() && !error && !endpoint.isPaused()) {
+ return SocketState.LONG;
+ } else {
+ input = null;
+ output = null;
+ return SocketState.CLOSED;
+ }
+
+ }
+
+ @Override
+"
+1," public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+ Run<?,?> run = context.getRun();
+ final Result runResult = run.getResult();
+ if (run instanceof AbstractBuild) {
+ Set<User> users = ((AbstractBuild<?,?>)run).getCulprits();
+ RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug);
+ } else if (runResult != null) {
+ List<Run<?, ?>> builds = new ArrayList<>();
+ Run<?, ?> build = run;
+ builds.add(build);
+ build = build.getPreviousCompletedBuild();
+ while (build != null) {
+ final Result buildResult = build.getResult();
+ if (buildResult != null) {
+ if (buildResult.isWorseThan(Result.SUCCESS)) {
+ debug.send(""Including build %s with status %s"", build.getId(), buildResult);
+ builds.add(build);
+ } else {
+ break;
+ }
+ }
+ build = build.getPreviousCompletedBuild();
+ }
+ Set<User> users = RecipientProviderUtilities.getChangeSetAuthors(builds, debug);
+ RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug);
+ }
+ }
+
+ @Extension
+ public static final class DescriptorImpl extends RecipientProviderDescriptor {
+
+ @Override
+ public String getDisplayName() {
+ return ""Culprits"";
+ }
+
+ }
+
+}
+"
+1," public Object getValue(Object parent) {
+ return ReflectionHelper.getValue( cascadingMember, parent );
+ }
+
+ @Override
+"
+1," public void testSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException {
+ logger.info(""--> creating repository"");
+ assertAcked(client().admin().cluster().preparePutRepository(""test-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", newTempDir(LifecycleScope.SUITE).getAbsolutePath())
+ .put(""compress"", randomBoolean())
+ .put(""chunk_size"", randomIntBetween(100, 1000))));
+ String[] indicesBefore = new String[randomIntBetween(2,5)];
+ String[] indicesAfter = new String[randomIntBetween(2,5)];
+ for (int i = 0; i < indicesBefore.length; i++) {
+ indicesBefore[i] = ""index_before_"" + i;
+ createIndex(indicesBefore[i]);
+ }
+ for (int i = 0; i < indicesAfter.length; i++) {
+ indicesAfter[i] = ""index_after_"" + i;
+ createIndex(indicesAfter[i]);
+ }
+ String[] indices = new String[indicesBefore.length + indicesAfter.length];
+ System.arraycopy(indicesBefore, 0, indices, 0, indicesBefore.length);
+ System.arraycopy(indicesAfter, 0, indices, indicesBefore.length, indicesAfter.length);
+ ensureYellow();
+ logger.info(""--> indexing some data"");
+ IndexRequestBuilder[] buildersBefore = new IndexRequestBuilder[randomIntBetween(10, 200)];
+ for (int i = 0; i < buildersBefore.length; i++) {
+ buildersBefore[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), ""foo"", Integer.toString(i)).setSource(""{ \""foo\"" : \""bar\"" } "");
+ }
+ IndexRequestBuilder[] buildersAfter = new IndexRequestBuilder[randomIntBetween(10, 200)];
+ for (int i = 0; i < buildersAfter.length; i++) {
+ buildersAfter[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), ""bar"", Integer.toString(i)).setSource(""{ \""foo\"" : \""bar\"" } "");
+ }
+ indexRandom(true, buildersBefore);
+ indexRandom(true, buildersAfter);
+ assertThat(client().prepareCount(indices).get().getCount(), equalTo((long) (buildersBefore.length + buildersAfter.length)));
+ long[] counts = new long[indices.length];
+ for (int i = 0; i < indices.length; i++) {
+ counts[i] = client().prepareCount(indices[i]).get().getCount();
+ }
+
+ logger.info(""--> snapshot subset of indices before upgrage"");
+ CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap-1"").setWaitForCompletion(true).setIndices(""index_before_*"").get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
+
+ assertThat(client().admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-snap-1"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+
+ logger.info(""--> delete some data from indices that were already snapshotted"");
+ int howMany = randomIntBetween(1, buildersBefore.length);
+
+ for (int i = 0; i < howMany; i++) {
+ IndexRequestBuilder indexRequestBuilder = RandomPicks.randomFrom(getRandom(), buildersBefore);
+ IndexRequest request = indexRequestBuilder.request();
+ client().prepareDelete(request.index(), request.type(), request.id()).get();
+ }
+ refresh();
+ final long numDocs = client().prepareCount(indices).get().getCount();
+ assertThat(client().prepareCount(indices).get().getCount(), lessThan((long) (buildersBefore.length + buildersAfter.length)));
+
+
+ client().admin().indices().prepareUpdateSettings(indices).setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""none"")).get();
+ backwardsCluster().allowOnAllNodes(indices);
+ logClusterState();
+ boolean upgraded;
+ do {
+ logClusterState();
+ CountResponse countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+ upgraded = backwardsCluster().upgradeOneNode();
+ ensureYellow();
+ countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+ } while (upgraded);
+ client().admin().indices().prepareUpdateSettings(indices).setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""all"")).get();
+
+ logger.info(""--> close indices"");
+ client().admin().indices().prepareClose(""index_before_*"").get();
+
+ logger.info(""--> verify repository"");
+ client().admin().cluster().prepareVerifyRepository(""test-repo"").get();
+
+ logger.info(""--> restore all indices from the snapshot"");
+ RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap-1"").setWaitForCompletion(true).execute().actionGet();
+ assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
+
+ ensureYellow();
+ assertThat(client().prepareCount(indices).get().getCount(), equalTo((long) (buildersBefore.length + buildersAfter.length)));
+ for (int i = 0; i < indices.length; i++) {
+ assertThat(counts[i], equalTo(client().prepareCount(indices[i]).get().getCount()));
+ }
+
+ logger.info(""--> snapshot subset of indices after upgrade"");
+ createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap-2"").setWaitForCompletion(true).setIndices(""index_*"").get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
+
+ // Test restore after index deletion
+ logger.info(""--> delete indices"");
+ String index = RandomPicks.randomFrom(getRandom(), indices);
+ cluster().wipeIndices(index);
+ logger.info(""--> restore one index after deletion"");
+ restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap-2"").setWaitForCompletion(true).setIndices(index).execute().actionGet();
+ assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
+ ensureYellow();
+ assertThat(client().prepareCount(indices).get().getCount(), equalTo((long) (buildersBefore.length + buildersAfter.length)));
+ for (int i = 0; i < indices.length; i++) {
+ assertThat(counts[i], equalTo(client().prepareCount(indices[i]).get().getCount()));
+ }
+ }
+
+"
+1," public void canModifyPassword() throws Exception {
+ ScimUser user = new ScimUser(null, generator.generate()+ ""@foo.com"", ""Jo"", ""User"");
+ user.addEmail(user.getUserName());
+ ScimUser created = db.createUser(user, ""j7hyqpassX"");
+ assertNull(user.getPasswordLastModified());
+ assertNotNull(created.getPasswordLastModified());
+ assertEquals(created.getMeta().getCreated(), created.getPasswordLastModified());
+ Thread.sleep(10);
+ db.changePassword(created.getId(), ""j7hyqpassX"", ""j7hyqpassXXX"");
+
+ user = db.retrieve(created.getId());
+ assertNotNull(user.getPasswordLastModified());
+ assertEquals(user.getMeta().getLastModified(), user.getPasswordLastModified());
+ }
+
+ @Test
+"
+1," public void addRecipients(final ExtendedEmailPublisherContext context, EnvVars env, Set to, Set cc, Set bcc) {
+ final class Debug implements RecipientProviderUtilities.IDebug {
+ private final ExtendedEmailPublisherDescriptor descriptor
+ = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class);
+
+ private final PrintStream logger = context.getListener().getLogger();
+
+ public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+ // looking for Upstream build.
+ Run<?, ?> cur = context.getRun();
+ Cause.UpstreamCause upc = cur.getCause(Cause.UpstreamCause.class);
+ while (upc != null) {
+ // UpstreamCause.getUpStreamProject() returns the full name, so use getItemByFullName
+ Job<?, ?> p = (Job<?, ?>) Jenkins.getActiveInstance().getItemByFullName(upc.getUpstreamProject());
+ if (p == null) {
+ context.getListener().getLogger().print(""There is a break in the project linkage, could not retrieve upstream project information"");
+ break;
+ }
+ cur = p.getBuildByNumber(upc.getUpstreamBuild());
+ upc = cur.getCause(Cause.UpstreamCause.class);
+ }
+ addUserTriggeringTheBuild(cur, to, cc, bcc, env, context.getListener(), debug);
+ }
+
+"
+1," public void doHarmonyDecoder(byte[] src, boolean errorExpected,
+ int failPosExpected) {
+ CharsetDecoder decoder = new Utf8Decoder();
+
+ ByteBuffer bb = ByteBuffer.allocate(src.length);
+ CharBuffer cb = CharBuffer.allocate(bb.limit());
+
+ boolean error = false;
+ int i = 0;
+ for (; i < src.length; i++) {
+ bb.put(src[i]);
+ bb.flip();
+ CoderResult cr = decoder.decode(bb, cb, false);
+ if (cr.isError()) {
+ error = true;
+ break;
+ }
+ bb.compact();
+ }
+
+ assertEquals(Boolean.valueOf(errorExpected), Boolean.valueOf(error));
+ assertEquals(failPosExpected, i);
+ }
+"
+1," public byte[] asPortableSerializedByteArray() throws EncryptionException {
+ // Check if this CipherText object is ""complete"", i.e., all
+ // mandatory has been collected.
+ if ( ! collectedAll() ) {
+ String msg = ""Can't serialize this CipherText object yet as not "" +
+ ""all mandatory information has been collected"";
+ throw new EncryptionException(""Can't serialize incomplete ciphertext info"", msg);
+ }
+
+ // If we are supposed to be using a (separate) MAC, also make sure
+ // that it has been computed/stored.
+ boolean usesMAC = ESAPI.securityConfiguration().useMACforCipherText();
+ if ( usesMAC && ! macComputed() ) {
+ String msg = ""Programming error: MAC is required for this cipher mode ("" +
+ getCipherMode() + ""), but MAC has not yet been "" +
+ ""computed and stored. Call the method "" +
+ ""computeAndStoreMAC(SecretKey) first before "" +
+ ""attempting serialization."";
+ throw new EncryptionException(""Can't serialize ciphertext info: Data integrity issue."",
+ msg);
+ }
+
+ // OK, everything ready, so give it a shot.
+ return new CipherTextSerializer(this).asSerializedByteArray();
+ }
+
+ ///// Setters /////
+ /**
+ * Set the raw ciphertext.
+ * @param ciphertext The raw ciphertext.
+ * @throws EncryptionException Thrown if the MAC has already been computed
+ * via {@link #computeAndStoreMAC(SecretKey)}.
+ */
+"
+1," protected void handle(Message msg) throws IOException {
+ inbound.offer(msg);
+ }
+
+ }
+
+}
+"
+1," protected void doPost(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ doGet(req, resp);
+ }
+ }
+
+ @ServletSecurity(@HttpConstraint(EmptyRoleSemantic.DENY))
+ public static class DenyAllServlet extends TestServlet {
+ private static final long serialVersionUID = 1L;
+ }
+
+ public static class SubclassDenyAllServlet extends DenyAllServlet {
+ private static final long serialVersionUID = 1L;
+ }
+
+ @ServletSecurity(@HttpConstraint(EmptyRoleSemantic.PERMIT))
+ public static class SubclassAllowAllServlet extends DenyAllServlet {
+ private static final long serialVersionUID = 1L;
+ }
+
+ @ServletSecurity(value= @HttpConstraint(EmptyRoleSemantic.PERMIT),
+ httpMethodConstraints = {
+ @HttpMethodConstraint(value=""GET"",
+ emptyRoleSemantic = EmptyRoleSemantic.DENY)
+ }
+ )
+ public static class MethodConstraintServlet extends TestServlet {
+ private static final long serialVersionUID = 1L;
+ }
+
+ @ServletSecurity(@HttpConstraint(rolesAllowed = ""testRole""))
+ public static class RoleAllowServlet extends TestServlet {
+ private static final long serialVersionUID = 1L;
+ }
+
+ @ServletSecurity(@HttpConstraint(rolesAllowed = ""otherRole""))
+ public static class RoleDenyServlet extends TestServlet {
+ private static final long serialVersionUID = 1L;
+ }
+}
+"
+1," protected Http11Processor createProcessor() {
+ Http11Processor processor = new Http11Processor(
+ proto.getMaxHttpHeaderSize(), (JIoEndpoint)proto.endpoint,
+ proto.getMaxTrailerSize());
+ processor.setAdapter(proto.getAdapter());
+ processor.setMaxKeepAliveRequests(proto.getMaxKeepAliveRequests());
+ processor.setKeepAliveTimeout(proto.getKeepAliveTimeout());
+ processor.setConnectionUploadTimeout(
+ proto.getConnectionUploadTimeout());
+ processor.setDisableUploadTimeout(proto.getDisableUploadTimeout());
+ processor.setCompressionMinSize(proto.getCompressionMinSize());
+ processor.setCompression(proto.getCompression());
+ processor.setNoCompressionUserAgents(proto.getNoCompressionUserAgents());
+ processor.setCompressableMimeTypes(proto.getCompressableMimeTypes());
+ processor.setRestrictedUserAgents(proto.getRestrictedUserAgents());
+ processor.setSocketBuffer(proto.getSocketBuffer());
+ processor.setMaxSavePostSize(proto.getMaxSavePostSize());
+ processor.setServer(proto.getServer());
+ processor.setDisableKeepAlivePercentage(
+ proto.getDisableKeepAlivePercentage());
+ register(processor);
+ return processor;
+ }
+
+ @Override
+"
+1," public SocketState process(SocketWrapper socket)
+ throws IOException {
+ RequestInfo rp = request.getRequestProcessor();
+ rp.setStage(org.apache.coyote.Constants.STAGE_PARSE);
+
+ // Setting up the socket
+ this.socket = socket.getSocket();
+
+ long soTimeout = endpoint.getSoTimeout();
+
+ // Error flag
+ error = false;
+
+ while (!error && !endpoint.isPaused()) {
+ // Parsing the request header
+ try {
+ // Get first message of the request
+ int bytesRead = readMessage(requestHeaderMessage, false);
+ if (bytesRead == 0) {
+ break;
+ }
+ // Set back timeout if keep alive timeout is enabled
+ if (keepAliveTimeout > 0) {
+ socket.setTimeout(soTimeout);
+ }
+ // Check message type, process right away and break if
+ // not regular request processing
+ int type = requestHeaderMessage.getByte();
+ if (type == Constants.JK_AJP13_CPING_REQUEST) {
+ try {
+ output(pongMessageArray, 0, pongMessageArray.length);
+ } catch (IOException e) {
+ error = true;
+ }
+ recycle(false);
+ continue;
+ } else if(type != Constants.JK_AJP13_FORWARD_REQUEST) {
+ // Usually the servlet didn't read the previous request body
+ if(log.isDebugEnabled()) {
+ log.debug(""Unexpected message: ""+type);
+ }
+ recycle(true);
+ continue;
+ }
+ request.setStartTime(System.currentTimeMillis());
+ } catch (IOException e) {
+ error = true;
+ break;
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.debug(sm.getString(""ajpprocessor.header.error""), t);
+ // 400 - Bad Request
+ response.setStatus(400);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+
+ if (!error) {
+ // Setting up filters, and parse some request headers
+ rp.setStage(org.apache.coyote.Constants.STAGE_PREPARE);
+ try {
+ prepareRequest();
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.debug(sm.getString(""ajpprocessor.request.prepare""), t);
+ // 400 - Bad Request
+ response.setStatus(400);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+ }
+
+ if (endpoint.isPaused()) {
+ // 503 - Service unavailable
+ response.setStatus(503);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+
+ // Process the request in the adapter
+ if (!error) {
+ try {
+ rp.setStage(org.apache.coyote.Constants.STAGE_SERVICE);
+ adapter.service(request, response);
+ } catch (InterruptedIOException e) {
+ error = true;
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.error(sm.getString(""ajpprocessor.request.process""), t);
+ // 500 - Internal Server Error
+ response.setStatus(500);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+ }
+
+ if (isAsync() && !error) {
+ break;
+ }
+
+ // Finish the response if not done yet
+ if (!finished) {
+ try {
+ finish();
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ error = true;
+ }
+ }
+
+ // If there was an error, make sure the request is counted as
+ // an error, and update the statistics counter
+ if (error) {
+ response.setStatus(500);
+ }
+ request.updateCounters();
+
+ rp.setStage(org.apache.coyote.Constants.STAGE_KEEPALIVE);
+ // Set keep alive timeout if enabled
+ if (keepAliveTimeout > 0) {
+ socket.setTimeout(keepAliveTimeout);
+ }
+
+ recycle(false);
+ }
+
+ rp.setStage(org.apache.coyote.Constants.STAGE_ENDED);
+
+ if (!error && !endpoint.isPaused()) {
+ if (isAsync()) {
+ return SocketState.LONG;
+ } else {
+ return SocketState.OPEN;
+ }
+ } else {
+ return SocketState.CLOSED;
+ }
+
+ }
+
+
+ // ----------------------------------------------------- ActionHook Methods
+
+
+ /**
+ * Send an action to the connector.
+ *
+ * @param actionCode Type of the action
+ * @param param Action parameter
+ */
+ @Override
+"
+1," public void invoke(Request request, Response response)
+ throws IOException, ServletException {
+
+ if (log.isDebugEnabled())
+ log.debug(""Security checking request "" +
+ request.getMethod() + "" "" + request.getRequestURI());
+ LoginConfig config = this.context.getLoginConfig();
+
+ // Have we got a cached authenticated Principal to record?
+ if (cache) {
+ Principal principal = request.getUserPrincipal();
+ if (principal == null) {
+ Session session = request.getSessionInternal(false);
+ if (session != null) {
+ principal = session.getPrincipal();
+ if (principal != null) {
+ if (log.isDebugEnabled())
+ log.debug(""We have cached auth type "" +
+ session.getAuthType() +
+ "" for principal "" +
+ session.getPrincipal());
+ request.setAuthType(session.getAuthType());
+ request.setUserPrincipal(principal);
+ }
+ }
+ }
+ }
+
+ // Special handling for form-based logins to deal with the case
+ // where the login form (and therefore the ""j_security_check"" URI
+ // to which it submits) might be outside the secured area
+ String contextPath = this.context.getPath();
+ String requestURI = request.getDecodedRequestURI();
+ if (requestURI.startsWith(contextPath) &&
+ requestURI.endsWith(Constants.FORM_ACTION)) {
+ if (!authenticate(request, response, config)) {
+ if (log.isDebugEnabled())
+ log.debug("" Failed authenticate() test ??"" + requestURI );
+ return;
+ }
+ }
+
+ // The Servlet may specify security constraints through annotations.
+ // Ensure that they have been processed before constraints are checked
+ Wrapper wrapper = (Wrapper) request.getMappingData().wrapper;
+ if (wrapper.getServlet() == null) {
+ wrapper.load();
+ }
+
+ Realm realm = this.context.getRealm();
+ // Is this request URI subject to a security constraint?
+ SecurityConstraint [] constraints
+ = realm.findSecurityConstraints(request, this.context);
+
+ if ((constraints == null) /* &&
+ (!Constants.FORM_METHOD.equals(config.getAuthMethod())) */ ) {
+ if (log.isDebugEnabled())
+ log.debug("" Not subject to any constraint"");
+ getNext().invoke(request, response);
+ return;
+ }
+
+ // Make sure that constrained resources are not cached by web proxies
+ // or browsers as caching can provide a security hole
+ if (disableProxyCaching &&
+ // FIXME: Disabled for Mozilla FORM support over SSL
+ // (improper caching issue)
+ //!request.isSecure() &&
+ !""POST"".equalsIgnoreCase(request.getMethod())) {
+ if (securePagesWithPragma) {
+ // FIXME: These cause problems with downloading office docs
+ // from IE under SSL and may not be needed for newer Mozilla
+ // clients.
+ response.setHeader(""Pragma"", ""No-cache"");
+ response.setHeader(""Cache-Control"", ""no-cache"");
+ } else {
+ response.setHeader(""Cache-Control"", ""private"");
+ }
+ response.setHeader(""Expires"", DATE_ONE);
+ }
+
+ int i;
+ // Enforce any user data constraint for this security constraint
+ if (log.isDebugEnabled()) {
+ log.debug("" Calling hasUserDataPermission()"");
+ }
+ if (!realm.hasUserDataPermission(request, response,
+ constraints)) {
+ if (log.isDebugEnabled()) {
+ log.debug("" Failed hasUserDataPermission() test"");
+ }
+ /*
+ * ASSERT: Authenticator already set the appropriate
+ * HTTP status code, so we do not have to do anything special
+ */
+ return;
+ }
+
+ // Since authenticate modifies the response on failure,
+ // we have to check for allow-from-all first.
+ boolean authRequired = true;
+ for(i=0; i < constraints.length && authRequired; i++) {
+ if(!constraints[i].getAuthConstraint()) {
+ authRequired = false;
+ } else if(!constraints[i].getAllRoles()) {
+ String [] roles = constraints[i].findAuthRoles();
+ if(roles == null || roles.length == 0) {
+ authRequired = false;
+ }
+ }
+ }
+
+ if(authRequired) {
+ if (log.isDebugEnabled()) {
+ log.debug("" Calling authenticate()"");
+ }
+ if (!authenticate(request, response, config)) {
+ if (log.isDebugEnabled()) {
+ log.debug("" Failed authenticate() test"");
+ }
+ /*
+ * ASSERT: Authenticator already set the appropriate
+ * HTTP status code, so we do not have to do anything
+ * special
+ */
+ return;
+ }
+
+ }
+
+ if (log.isDebugEnabled()) {
+ log.debug("" Calling accessControl()"");
+ }
+ if (!realm.hasResourcePermission(request, response,
+ constraints,
+ this.context)) {
+ if (log.isDebugEnabled()) {
+ log.debug("" Failed accessControl() test"");
+ }
+ /*
+ * ASSERT: AccessControl method has already set the
+ * appropriate HTTP status code, so we do not have to do
+ * anything special
+ */
+ return;
+ }
+
+ // Any and all specified constraints have been satisfied
+ if (log.isDebugEnabled()) {
+ log.debug("" Successfully passed all security constraints"");
+ }
+ getNext().invoke(request, response);
+
+ }
+
+
+ // ------------------------------------------------------ Protected Methods
+
+
+ /**
+ * Associate the specified single sign on identifier with the
+ * specified Session.
+ *
+ * @param ssoId Single sign on identifier
+ * @param session Session to be associated
+ */
+"
+1," public void setUp() throws Exception
+ {
+ super.setUp();
+ _expectedResult = mock(AuthenticationResult.class);
+ _authenticationProvider = mock(UsernamePasswordAuthenticationProvider.class);
+ when(_authenticationProvider.authenticate(eq(VALID_USERNAME), eq(VALID_PASSWORD))).thenReturn(_expectedResult);
+ _negotiator = new PlainNegotiator(_authenticationProvider);
+ }
+
+ @Override
+"
+1," public byte[] toByteArray()
+ {
+ /* index || secretKeySeed || secretKeyPRF || publicSeed || root */
+ int n = params.getDigestSize();
+ int indexSize = (params.getHeight() + 7) / 8;
+ int secretKeySize = n;
+ int secretKeyPRFSize = n;
+ int publicSeedSize = n;
+ int rootSize = n;
+ int totalSize = indexSize + secretKeySize + secretKeyPRFSize + publicSeedSize + rootSize;
+ byte[] out = new byte[totalSize];
+ int position = 0;
+ /* copy index */
+ byte[] indexBytes = XMSSUtil.toBytesBigEndian(index, indexSize);
+ XMSSUtil.copyBytesAtOffset(out, indexBytes, position);
+ position += indexSize;
+ /* copy secretKeySeed */
+ XMSSUtil.copyBytesAtOffset(out, secretKeySeed, position);
+ position += secretKeySize;
+ /* copy secretKeyPRF */
+ XMSSUtil.copyBytesAtOffset(out, secretKeyPRF, position);
+ position += secretKeyPRFSize;
+ /* copy publicSeed */
+ XMSSUtil.copyBytesAtOffset(out, publicSeed, position);
+ position += publicSeedSize;
+ /* copy root */
+ XMSSUtil.copyBytesAtOffset(out, root, position);
+ /* concatenate bdsState */
+ byte[] bdsStateOut = null;
+ try
+ {
+ bdsStateOut = XMSSUtil.serialize(bdsState);
+ }
+ catch (IOException e)
+ {
+ e.printStackTrace();
+ throw new RuntimeException(""error serializing bds state"");
+ }
+ return Arrays.concatenate(out, bdsStateOut);
+ }
+
+"
+1," public TransformerFactory createTransformerFactory() {
+ TransformerFactory factory = TransformerFactory.newInstance();
+ factory.setErrorListener(new XmlErrorListener());
+ return factory;
+ }
+
+"
+1," public static void addUsers(final Set users, final TaskListener listener, final EnvVars env,
+ final Set to, final Set cc, final Set bcc, final IDebug debug) {
+ for (final User user : users) {
+ if (EmailRecipientUtils.isExcludedRecipient(user, listener)) {
+ debug.send(""User %s is an excluded recipient."", user.getFullName());
+ } else {
+ final String userAddress = EmailRecipientUtils.getUserConfiguredEmail(user);
+ if (userAddress != null) {
+ debug.send(""Adding %s with address %s"", user.getFullName(), userAddress);
+ EmailRecipientUtils.addAddressesFromRecipientList(to, cc, bcc, userAddress, env, listener);
+ } else {
+ listener.getLogger().println(""Failed to send e-mail to ""
+ + user.getFullName()
+ + "" because no e-mail address is known, and no default e-mail domain is configured"");
+ }
+ }
+ }
+ }
+"
+1," PlainText decrypt(SecretKey key, CipherText ciphertext) throws EncryptionException;
+
+ /**
+ * Create a digital signature for the provided data and return it in a
+ * string.
+ *
+ * Limitations: A new public/private key pair used for ESAPI 2.0 digital
+ * signatures with this method and {@link #verifySignature(String, String)}
+ * are dynamically created when the default reference implementation class,
+ * {@link org.owasp.esapi.reference.crypto.JavaEncryptor} is first created.
+ * Because this key pair is not persisted nor is the public key shared,
+ * this method and the corresponding {@link #verifySignature(String, String)}
+ * can not be used with expected results across JVM instances. This limitation
+ * will be addressed in ESAPI 2.1.
+ *
+ *
+ * @param data
+ * the data to sign
+ *
+ * @return
+ * the digital signature stored as a String
+ *
+ * @throws EncryptionException
+ * if the specified signature algorithm cannot be found
+ */
+"
+1," public void init(KeyGenerationParameters param)
+ {
+ this.param = (RSAKeyGenerationParameters)param;
+ this.iterations = getNumberOfIterations(this.param.getStrength(), this.param.getCertainty());
+ }
+
+"
+1," private ResetPasswordResponse changePasswordCodeAuthenticated(String code, String newPassword) {
+ ExpiringCode expiringCode = expiringCodeStore.retrieveCode(code);
+ if (expiringCode == null) {
+ throw new InvalidCodeException(""invalid_code"", ""Sorry, your reset password link is no longer valid. Please request a new one"", 422);
+ }
+ String userId;
+ String userName = null;
+ Date passwordLastModified = null;
+ String clientId = null;
+ String redirectUri = null;
+ try {
+ PasswordChange change = JsonUtils.readValue(expiringCode.getData(), PasswordChange.class);
+ userId = change.getUserId();
+ userName = change.getUsername();
+ passwordLastModified = change.getPasswordModifiedTime();
+ clientId = change.getClientId();
+ redirectUri = change.getRedirectUri();
+ } catch (JsonUtils.JsonUtilException x) {
+ userId = expiringCode.getData();
+ }
+ ScimUser user = scimUserProvisioning.retrieve(userId);
+ try {
+ if (isUserModified(user, expiringCode.getExpiresAt(), userName, passwordLastModified)) {
+ throw new UaaException(""Invalid password reset request."");
+ }
+ if (!user.isVerified()) {
+ scimUserProvisioning.verifyUser(userId, -1);
+ }
+ if (scimUserProvisioning.checkPasswordMatches(userId, newPassword)) {
+ throw new InvalidPasswordException(""Your new password cannot be the same as the old password."", UNPROCESSABLE_ENTITY);
+ }
+ scimUserProvisioning.changePassword(userId, null, newPassword);
+ publish(new PasswordChangeEvent(""Password changed"", getUaaUser(user), SecurityContextHolder.getContext().getAuthentication()));
+
+ String redirectLocation = ""home"";
+ if (!isEmpty(clientId) && !isEmpty(redirectUri)) {
+ try {
+ ClientDetails clientDetails = clientDetailsService.loadClientByClientId(clientId);
+ Set<String> redirectUris = clientDetails.getRegisteredRedirectUri() == null ? Collections.emptySet() :
+ clientDetails.getRegisteredRedirectUri();
+ String matchingRedirectUri = UaaUrlUtils.findMatchingRedirectUri(redirectUris, redirectUri, null);
+ if (matchingRedirectUri != null) {
+ redirectLocation = matchingRedirectUri;
+ }
+ } catch (NoSuchClientException nsce) {}
+ }
+ return new ResetPasswordResponse(user, redirectLocation, clientId);
+ } catch (Exception e) {
+ publish(new PasswordChangeFailureEvent(e.getMessage(), getUaaUser(user), SecurityContextHolder.getContext().getAuthentication()));
+ throw e;
+ }
+ }
+
+ @Override
+"
+1," public boolean accept(File pathname) {
+ return pathname.isDirectory() && new File(pathname, ""config.xml"").isFile() && idStrategy().equals(
+ pathname.getName(), id);
+ }
+ });
+ }
+
+ /**
+ * Gets the directory where Hudson stores user information.
+ */
+"
+1," public void register(ContainerBuilder builder, LocatableProperties props) {
+ alias(ObjectFactory.class, StrutsConstants.STRUTS_OBJECTFACTORY, builder, props);
+ alias(FileManagerFactory.class, StrutsConstants.STRUTS_FILE_MANAGER_FACTORY, builder, props, Scope.SINGLETON);
+
+ alias(XWorkConverter.class, StrutsConstants.STRUTS_XWORKCONVERTER, builder, props);
+ alias(CollectionConverter.class, StrutsConstants.STRUTS_CONVERTER_COLLECTION, builder, props);
+ alias(ArrayConverter.class, StrutsConstants.STRUTS_CONVERTER_ARRAY, builder, props);
+ alias(DateConverter.class, StrutsConstants.STRUTS_CONVERTER_DATE, builder, props);
+ alias(NumberConverter.class, StrutsConstants.STRUTS_CONVERTER_NUMBER, builder, props);
+ alias(StringConverter.class, StrutsConstants.STRUTS_CONVERTER_STRING, builder, props);
+
+ alias(ConversionPropertiesProcessor.class, StrutsConstants.STRUTS_CONVERTER_PROPERTIES_PROCESSOR, builder, props);
+ alias(ConversionFileProcessor.class, StrutsConstants.STRUTS_CONVERTER_FILE_PROCESSOR, builder, props);
+ alias(ConversionAnnotationProcessor.class, StrutsConstants.STRUTS_CONVERTER_ANNOTATION_PROCESSOR, builder, props);
+ alias(TypeConverterCreator.class, StrutsConstants.STRUTS_CONVERTER_CREATOR, builder, props);
+ alias(TypeConverterHolder.class, StrutsConstants.STRUTS_CONVERTER_HOLDER, builder, props);
+
+ alias(TextProvider.class, StrutsConstants.STRUTS_XWORKTEXTPROVIDER, builder, props, Scope.DEFAULT);
+
+ alias(LocaleProvider.class, StrutsConstants.STRUTS_LOCALE_PROVIDER, builder, props);
+ alias(ActionProxyFactory.class, StrutsConstants.STRUTS_ACTIONPROXYFACTORY, builder, props);
+ alias(ObjectTypeDeterminer.class, StrutsConstants.STRUTS_OBJECTTYPEDETERMINER, builder, props);
+ alias(ActionMapper.class, StrutsConstants.STRUTS_MAPPER_CLASS, builder, props);
+ alias(MultiPartRequest.class, StrutsConstants.STRUTS_MULTIPART_PARSER, builder, props, Scope.DEFAULT);
+ alias(FreemarkerManager.class, StrutsConstants.STRUTS_FREEMARKER_MANAGER_CLASSNAME, builder, props);
+ alias(VelocityManager.class, StrutsConstants.STRUTS_VELOCITY_MANAGER_CLASSNAME, builder, props);
+ alias(UrlRenderer.class, StrutsConstants.STRUTS_URL_RENDERER, builder, props);
+ alias(ActionValidatorManager.class, StrutsConstants.STRUTS_ACTIONVALIDATORMANAGER, builder, props);
+ alias(ValueStackFactory.class, StrutsConstants.STRUTS_VALUESTACKFACTORY, builder, props);
+ alias(ReflectionProvider.class, StrutsConstants.STRUTS_REFLECTIONPROVIDER, builder, props);
+ alias(ReflectionContextFactory.class, StrutsConstants.STRUTS_REFLECTIONCONTEXTFACTORY, builder, props);
+ alias(PatternMatcher.class, StrutsConstants.STRUTS_PATTERNMATCHER, builder, props);
+ alias(StaticContentLoader.class, StrutsConstants.STRUTS_STATIC_CONTENT_LOADER, builder, props);
+ alias(UnknownHandlerManager.class, StrutsConstants.STRUTS_UNKNOWN_HANDLER_MANAGER, builder, props);
+ alias(UrlHelper.class, StrutsConstants.STRUTS_URL_HELPER, builder, props);
+
+ alias(TextParser.class, StrutsConstants.STRUTS_EXPRESSION_PARSER, builder, props);
+
+ if (""true"".equalsIgnoreCase(props.getProperty(StrutsConstants.STRUTS_DEVMODE))) {
+ props.setProperty(StrutsConstants.STRUTS_I18N_RELOAD, ""true"");
+ props.setProperty(StrutsConstants.STRUTS_CONFIGURATION_XML_RELOAD, ""true"");
+ props.setProperty(StrutsConstants.STRUTS_FREEMARKER_TEMPLATES_CACHE, ""false"");
+ props.setProperty(StrutsConstants.STRUTS_FREEMARKER_TEMPLATES_CACHE_UPDATE_DELAY, ""0"");
+ // Convert struts properties into ones that xwork expects
+ props.setProperty(XWorkConstants.DEV_MODE, ""true"");
+ } else {
+ props.setProperty(XWorkConstants.DEV_MODE, ""false"");
+ }
+
+ // Convert Struts properties into XWork properties
+ convertIfExist(props, StrutsConstants.STRUTS_LOG_MISSING_PROPERTIES, XWorkConstants.LOG_MISSING_PROPERTIES);
+ convertIfExist(props, StrutsConstants.STRUTS_ENABLE_OGNL_EXPRESSION_CACHE, XWorkConstants.ENABLE_OGNL_EXPRESSION_CACHE);
+ convertIfExist(props, StrutsConstants.STRUTS_ALLOW_STATIC_METHOD_ACCESS, XWorkConstants.ALLOW_STATIC_METHOD_ACCESS);
+ convertIfExist(props, StrutsConstants.STRUTS_CONFIGURATION_XML_RELOAD, XWorkConstants.RELOAD_XML_CONFIGURATION);
+
+ LocalizedTextUtil.addDefaultResourceBundle(""org/apache/struts2/struts-messages"");
+ loadCustomResourceBundles(props);
+ }
+
+"
+1," public void changePassword_Returns422UnprocessableEntity_NewPasswordSameAsOld() throws Exception {
+
+ Mockito.reset(passwordValidator);
+
+ when(expiringCodeStore.retrieveCode(""emailed_code""))
+ .thenReturn(new ExpiringCode(""emailed_code"", new Timestamp(System.currentTimeMillis()+ UaaResetPasswordService.PASSWORD_RESET_LIFETIME), ""eyedee"", null));
+
+ ScimUser scimUser = new ScimUser(""eyedee"", ""user@example.com"", ""User"", ""Man"");
+ scimUser.setMeta(new ScimMeta(new Date(System.currentTimeMillis()-(1000*60*60*24)), new Date(System.currentTimeMillis()-(1000*60*60*24)), 0));
+ scimUser.addEmail(""user@example.com"");
+ scimUser.setVerified(true);
+
+ when(scimUserProvisioning.retrieve(""eyedee"")).thenReturn(scimUser);
+ when(scimUserProvisioning.checkPasswordMatches(""eyedee"", ""new_secret"")).thenReturn(true);
+
+ MockHttpServletRequestBuilder post = post(""/password_change"")
+ .contentType(APPLICATION_JSON)
+ .content(""{\""code\"":\""emailed_code\"",\""new_password\"":\""new_secret\""}"")
+ .accept(APPLICATION_JSON);
+
+ SecurityContextHolder.getContext().setAuthentication(new MockAuthentication());
+
+ mockMvc.perform(post)
+ .andExpect(status().isUnprocessableEntity())
+ .andExpect(content().string(JsonObjectMatcherUtils.matchesJsonObject(new JSONObject().put(""error_description"", ""Your new password cannot be the same as the old password."").put(""message"", ""Your new password cannot be the same as the old password."").put(""error"", ""invalid_password""))));
+ }
+"
+1," private void setValidatedValueHandlerToValueContextIfPresent(ValidationContext> validationContext,
+ ValueContext, T> valueContext, ConstraintMetaData metaData) {
+ if ( metaData.requiresUnwrapping() ) {
+ @SuppressWarnings(""unchecked"") //we know the handler matches the value type
+ ValidatedValueUnwrapper super T> handler = (ValidatedValueUnwrapper) getValidatedValueHandler(
+ metaData.getType()
+ );
+
+ if ( handler == null ) {
+ throw log.getNoUnwrapperFoundForTypeException( metaData.getType().toString() );
+ }
+
+ valueContext.setValidatedValueHandler( handler );
+ }
+ }
+"
+1," public boolean getValidateClientProvidedNewSessionId() { return false; }
+"
+1," protected Object findValue(String expr, Class toType) {
+ if (altSyntax() && toType == String.class) {
+ return TextParseUtil.translateVariables('%', expr, stack);
+ } else {
+ expr = stripExpressionIfAltSyntax(expr);
+
+ return getStack().findValue(expr, toType, throwExceptionOnELFailure);
+ }
+ }
+
+ /**
+ * Renders an action URL by consulting the {@link org.apache.struts2.dispatcher.mapper.ActionMapper}.
+ * @param action the action
+ * @param namespace the namespace
+ * @param method the method
+ * @param req HTTP request
+ * @param res HTTP response
+ * @param parameters parameters
+ * @param scheme http or https
+ * @param includeContext should the context path be included or not
+ * @param encodeResult should the url be encoded
+ * @param forceAddSchemeHostAndPort should the scheme, host and port be forced
+ * @param escapeAmp should ampersand (&) be escaped to &amp;
+ * @return the action url.
+ */
+"
+1," public HttpBinding getBinding() {
+ if (this.binding == null) {
+ this.binding = new AttachmentHttpBinding();
+ this.binding.setTransferException(isTransferException());
+ this.binding.setHeaderFilterStrategy(getHeaderFilterStrategy());
+ }
+ return this.binding;
+ }
+
+ @Override
+"
+1," public HttpBinding getBinding() {
+ if (binding == null) {
+ // create a new binding and use the options from this endpoint
+ binding = new DefaultHttpBinding();
+ binding.setHeaderFilterStrategy(getHeaderFilterStrategy());
+ binding.setTransferException(isTransferException());
+ binding.setEagerCheckContentAvailable(isEagerCheckContentAvailable());
+ }
+ return binding;
+ }
+
+ /**
+ * To use a custom HttpBinding to control the mapping between Camel message and HttpClient.
+ */
+"
+1," public void setMaxTrailerSize(int maxTrailerSize) {
+ this.maxTrailerSize = maxTrailerSize;
+ }
+
+
+ /**
+ * This field indicates if the protocol is treated as if it is secure. This
+ * normally means HTTPS is being used, but it can also be used to fake HTTPS,
+ * e.g. behind a reverse proxy.
+ */
+"
+1," public void addRecipients(final ExtendedEmailPublisherContext context, final EnvVars env,
+ final Set<InternetAddress> to, final Set<InternetAddress> cc, final Set<InternetAddress> bcc) {
+
+ final class Debug implements RecipientProviderUtilities.IDebug {
+ private final ExtendedEmailPublisherDescriptor descriptor
+ = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class);
+
+ private final PrintStream logger = context.getListener().getLogger();
+
+ public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+
+ Set<User> users = null;
+
+ final Run<?, ?> currentRun = context.getRun();
+ if (currentRun == null) {
+ debug.send(""currentRun was null"");
+ } else {
+ if (!Objects.equals(currentRun.getResult(), Result.FAILURE)) {
+ debug.send(""currentBuild did not fail"");
+ } else {
+ users = new HashSet<>();
+ debug.send(""Collecting builds with suspects..."");
+ final HashSet<Run<?, ?>> buildsWithSuspects = new HashSet<>();
+ Run<?, ?> firstFailedBuild = currentRun;
+ Run<?, ?> candidate = currentRun;
+ while (candidate != null) {
+ final Result candidateResult = candidate.getResult();
+ if ( candidateResult == null || !candidateResult.isWorseOrEqualTo(Result.FAILURE) ) {
+ break;
+ }
+ firstFailedBuild = candidate;
+ candidate = candidate.getPreviousCompletedBuild();
+ }
+ if (firstFailedBuild instanceof AbstractBuild) {
+ buildsWithSuspects.add(firstFailedBuild);
+ } else {
+ debug.send("" firstFailedBuild was not an instance of AbstractBuild"");
+ }
+ debug.send(""Collecting suspects..."");
+ users.addAll(RecipientProviderUtilities.getChangeSetAuthors(buildsWithSuspects, debug));
+ users.addAll(RecipientProviderUtilities.getUsersTriggeringTheBuilds(buildsWithSuspects, debug));
+ }
+ }
+ if (users != null) {
+ RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug);
+ }
+ }
+
+ @Extension
+"
+1," public void doFilter(ServletRequest filterRequest,
+ ServletResponse filterResponse) throws IOException, ServletException {
+ assertEquals(""Invalid method"", ""POST"",
+ ((HttpServletRequest) filterRequest).getMethod());
+ }
+ };
+"
+1," public void restorePersistentSettingsTest() throws Exception {
+ logger.info(""--> start 2 nodes"");
+ Settings nodeSettings = settingsBuilder()
+ .put(""discovery.type"", ""zen"")
+ .put(""discovery.zen.ping_timeout"", ""200ms"")
+ .put(""discovery.initial_state_timeout"", ""500ms"")
+ .build();
+ internalCluster().startNode(nodeSettings);
+ Client client = client();
+ String secondNode = internalCluster().startNode(nodeSettings);
+ logger.info(""--> wait for the second node to join the cluster"");
+ assertThat(client.admin().cluster().prepareHealth().setWaitForNodes(""2"").get().isTimedOut(), equalTo(false));
+
+ int random = randomIntBetween(10, 42);
+
+ logger.info(""--> set test persistent setting"");
+ client.admin().cluster().prepareUpdateSettings().setPersistentSettings(
+ ImmutableSettings.settingsBuilder()
+ .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 2)
+ .put(IndicesTTLService.INDICES_TTL_INTERVAL, random, TimeUnit.MINUTES))
+ .execute().actionGet();
+
+ assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
+ .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis()));
+ assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
+ .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1), equalTo(2));
+
+ logger.info(""--> create repository"");
+ PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(""test-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder().put(""location"", newTempDir())).execute().actionGet();
+ assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
+
+ logger.info(""--> start snapshot"");
+ CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).execute().actionGet();
+ assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(0));
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(0));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-snap"").execute().actionGet().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+
+ logger.info(""--> clean the test persistent setting"");
+ client.admin().cluster().prepareUpdateSettings().setPersistentSettings(
+ ImmutableSettings.settingsBuilder()
+ .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, 1)
+ .put(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)))
+ .execute().actionGet();
+ assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
+ .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis()));
+
+ stopNode(secondNode);
+ assertThat(client.admin().cluster().prepareHealth().setWaitForNodes(""1"").get().isTimedOut(), equalTo(false));
+
+ logger.info(""--> restore snapshot"");
+ client.admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet();
+ assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
+ .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL, TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis()));
+
+ logger.info(""--> ensure that zen discovery minimum master nodes wasn't restored"");
+ assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
+ .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1), not(equalTo(2)));
+ }
+
+ @Test
+"
+1," public void init(FilterConfig conf) throws ServletException {
+ if (conf != null && ""zookeeper"".equals(conf.getInitParameter(""signer.secret.provider""))) {
+ SolrZkClient zkClient =
+ (SolrZkClient)conf.getServletContext().getAttribute(KerberosPlugin.DELEGATION_TOKEN_ZK_CLIENT);
+ conf.getServletContext().setAttribute(""signer.secret.provider.zookeeper.curator.client"",
+ getCuratorClient(zkClient));
+ }
+ super.init(conf);
+ }
+
+ /**
+ * Return the ProxyUser Configuration. FilterConfig properties beginning with
+ * ""solr.impersonator.user.name"" will be added to the configuration.
+ */
+ @Override
+"
+1," Attributes setPropertiesFromAttributes(
+ StylesheetHandler handler, String rawName, Attributes attributes,
+ ElemTemplateElement target, boolean throwError)
+ throws org.xml.sax.SAXException
+ {
+
+ XSLTElementDef def = getElemDef();
+ AttributesImpl undefines = null;
+ boolean isCompatibleMode = ((null != handler.getStylesheet()
+ && handler.getStylesheet().getCompatibleMode())
+ || !throwError);
+ if (isCompatibleMode)
+ undefines = new AttributesImpl();
+
+
+ // Keep track of which XSLTAttributeDefs have been processed, so
+ // I can see which default values need to be set.
+ List processedDefs = new ArrayList();
+
+ // Keep track of XSLTAttributeDefs that were invalid
+ List errorDefs = new ArrayList();
+ int nAttrs = attributes.getLength();
+
+ for (int i = 0; i < nAttrs; i++)
+ {
+ String attrUri = attributes.getURI(i);
+ // Hack for Crimson. -sb
+ if((null != attrUri) && (attrUri.length() == 0)
+ && (attributes.getQName(i).startsWith(""xmlns:"") ||
+ attributes.getQName(i).equals(""xmlns"")))
+ {
+ attrUri = org.apache.xalan.templates.Constants.S_XMLNAMESPACEURI;
+ }
+ String attrLocalName = attributes.getLocalName(i);
+ XSLTAttributeDef attrDef = def.getAttributeDef(attrUri, attrLocalName);
+
+ if (null == attrDef)
+ {
+ if (!isCompatibleMode)
+ {
+
+ // Then barf, because this element does not allow this attribute.
+ handler.error(XSLTErrorResources.ER_ATTR_NOT_ALLOWED, new Object[]{attributes.getQName(i), rawName}, null);//""\""""+attributes.getQName(i)+""\""""
+ //+ "" attribute is not allowed on the "" + rawName
+ // + "" element!"", null);
+ }
+ else
+ {
+ undefines.addAttribute(attrUri, attrLocalName,
+ attributes.getQName(i),
+ attributes.getType(i),
+ attributes.getValue(i));
+ }
+ }
+ else
+ {
+ // Can we switch the order here:
+
+ boolean success = attrDef.setAttrValue(handler, attrUri, attrLocalName,
+ attributes.getQName(i), attributes.getValue(i),
+ target);
+
+ // Now we only add the element if it passed a validation check
+ if (success)
+ processedDefs.add(attrDef);
+ else
+ errorDefs.add(attrDef);
+ }
+ }
+
+ XSLTAttributeDef[] attrDefs = def.getAttributes();
+ int nAttrDefs = attrDefs.length;
+
+ for (int i = 0; i < nAttrDefs; i++)
+ {
+ XSLTAttributeDef attrDef = attrDefs[i];
+ String defVal = attrDef.getDefault();
+
+ if (null != defVal)
+ {
+ if (!processedDefs.contains(attrDef))
+ {
+ attrDef.setDefAttrValue(handler, target);
+ }
+ }
+
+ if (attrDef.getRequired())
+ {
+ if ((!processedDefs.contains(attrDef)) && (!errorDefs.contains(attrDef)))
+ handler.error(
+ XSLMessages.createMessage(
+ XSLTErrorResources.ER_REQUIRES_ATTRIB, new Object[]{ rawName,
+ attrDef.getName() }), null);
+ }
+ }
+
+ return undefines;
+ }
+"
+1," public SecurityConstraint [] findSecurityConstraints(Request request,
+ Context context) {
+
+ ArrayList<SecurityConstraint> results = null;
+ // Are there any defined security constraints?
+ SecurityConstraint constraints[] = context.findConstraints();
+ if ((constraints == null) || (constraints.length == 0)) {
+ if (log.isDebugEnabled())
+ log.debug("" No applicable constraints defined"");
+ return null;
+ }
+
+ // Check each defined security constraint
+ String uri = request.getRequestPathMB().toString();
+ // Bug47080 - in rare cases this may be null
+ // The Mapper treats it as '/'; do the same here to prevent an NPE
+ if (uri == null) {
+ uri = ""/"";
+ }
+
+ String method = request.getMethod();
+ int i;
+ boolean found = false;
+ for (i = 0; i < constraints.length; i++) {
+ SecurityCollection [] collection = constraints[i].findCollections();
+
+ // If collection is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( collection == null) {
+ continue;
+ }
+
+ if (log.isDebugEnabled()) {
+ log.debug("" Checking constraint '"" + constraints[i] +
+ ""' against "" + method + "" "" + uri + "" --> "" +
+ constraints[i].included(uri, method));
+ }
+
+ for(int j=0; j < collection.length; j++){
+ String [] patterns = collection[j].findPatterns();
+
+ // If patterns is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( patterns == null) {
+ continue;
+ }
+
+ for(int k=0; k < patterns.length; k++) {
+ if(uri.equals(patterns[k])) {
+ found = true;
+ if(collection[j].findMethod(method)) {
+ if(results == null) {
+ results = new ArrayList<>();
+ }
+ results.add(constraints[i]);
+ }
+ }
+ }
+ }
+ }
+
+ if(found) {
+ return resultsToArray(results);
+ }
+
+ int longest = -1;
+
+ for (i = 0; i < constraints.length; i++) {
+ SecurityCollection [] collection = constraints[i].findCollections();
+
+ // If collection is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( collection == null) {
+ continue;
+ }
+
+ if (log.isDebugEnabled()) {
+ log.debug("" Checking constraint '"" + constraints[i] +
+ ""' against "" + method + "" "" + uri + "" --> "" +
+ constraints[i].included(uri, method));
+ }
+
+ for(int j=0; j < collection.length; j++){
+ String [] patterns = collection[j].findPatterns();
+
+ // If patterns is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( patterns == null) {
+ continue;
+ }
+
+ boolean matched = false;
+ int length = -1;
+ for(int k=0; k < patterns.length; k++) {
+ String pattern = patterns[k];
+ if(pattern.startsWith(""/"") && pattern.endsWith(""/*"") &&
+ pattern.length() >= longest) {
+
+ if(pattern.length() == 2) {
+ matched = true;
+ length = pattern.length();
+ } else if(pattern.regionMatches(0,uri,0,
+ pattern.length()-1) ||
+ (pattern.length()-2 == uri.length() &&
+ pattern.regionMatches(0,uri,0,
+ pattern.length()-2))) {
+ matched = true;
+ length = pattern.length();
+ }
+ }
+ }
+ if(matched) {
+ if(length > longest) {
+ found = false;
+ if(results != null) {
+ results.clear();
+ }
+ longest = length;
+ }
+ if(collection[j].findMethod(method)) {
+ found = true;
+ if(results == null) {
+ results = new ArrayList<>();
+ }
+ results.add(constraints[i]);
+ }
+ }
+ }
+ }
+
+ if(found) {
+ return resultsToArray(results);
+ }
+
+ for (i = 0; i < constraints.length; i++) {
+ SecurityCollection [] collection = constraints[i].findCollections();
+
+ // If collection is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( collection == null) {
+ continue;
+ }
+
+ if (log.isDebugEnabled()) {
+ log.debug("" Checking constraint '"" + constraints[i] +
+ ""' against "" + method + "" "" + uri + "" --> "" +
+ constraints[i].included(uri, method));
+ }
+
+ boolean matched = false;
+ int pos = -1;
+ for(int j=0; j < collection.length; j++){
+ String [] patterns = collection[j].findPatterns();
+
+ // If patterns is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( patterns == null) {
+ continue;
+ }
+
+ for(int k=0; k < patterns.length && !matched; k++) {
+ String pattern = patterns[k];
+ if(pattern.startsWith(""*."")){
+ int slash = uri.lastIndexOf('/');
+ int dot = uri.lastIndexOf('.');
+ if(slash >= 0 && dot > slash &&
+ dot != uri.length()-1 &&
+ uri.length()-dot == pattern.length()-1) {
+ if(pattern.regionMatches(1,uri,dot,uri.length()-dot)) {
+ matched = true;
+ pos = j;
+ }
+ }
+ }
+ }
+ }
+ if(matched) {
+ found = true;
+ if(collection[pos].findMethod(method)) {
+ if(results == null) {
+ results = new ArrayList<>();
+ }
+ results.add(constraints[i]);
+ }
+ }
+ }
+
+ if(found) {
+ return resultsToArray(results);
+ }
+
+ for (i = 0; i < constraints.length; i++) {
+ SecurityCollection [] collection = constraints[i].findCollections();
+
+ // If collection is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( collection == null) {
+ continue;
+ }
+
+ if (log.isDebugEnabled()) {
+ log.debug("" Checking constraint '"" + constraints[i] +
+ ""' against "" + method + "" "" + uri + "" --> "" +
+ constraints[i].included(uri, method));
+ }
+
+ for(int j=0; j < collection.length; j++){
+ String [] patterns = collection[j].findPatterns();
+
+ // If patterns is null, continue to avoid an NPE
+ // See Bugzilla 30624
+ if ( patterns == null) {
+ continue;
+ }
+
+ boolean matched = false;
+ for(int k=0; k < patterns.length && !matched; k++) {
+ String pattern = patterns[k];
+ if(pattern.equals(""/"")){
+ matched = true;
+ }
+ }
+ if(matched) {
+ if(results == null) {
+ results = new ArrayList<>();
+ }
+ results.add(constraints[i]);
+ }
+ }
+ }
+
+ if(results == null) {
+ // No applicable security constraint was found
+ if (log.isDebugEnabled())
+ log.debug("" No applicable constraint located"");
+ }
+ return resultsToArray(results);
+ }
+
+ /**
+ * Convert an ArrayList to a SecurityConstraint [].
+ */
+"
+1," private void writeSession(SessionInformations session, boolean displayUser) throws IOException {
+ final String nextColumnAlignRight = """";
+ final String nextColumnAlignCenter = "" "";
+ write("" "");
+ write(htmlEncodeButNotSpace(session.getId()));
+ write("" "");
+ write(nextColumnAlignRight);
+ write(durationFormat.format(session.getLastAccess()));
+ write(nextColumnAlignRight);
+ write(durationFormat.format(session.getAge()));
+ write(nextColumnAlignRight);
+ write(expiryFormat.format(session.getExpirationDate()));
+
+ write(nextColumnAlignRight);
+ write(integerFormat.format(session.getAttributeCount()));
+ write(nextColumnAlignCenter);
+ if (session.isSerializable()) {
+ write(""#oui#"");
+ } else {
+ write(""#non# "");
+ }
+ write(nextColumnAlignRight);
+ write(integerFormat.format(session.getSerializedSize()));
+ final String nextColumn = """";
+ write(nextColumn);
+ final String remoteAddr = session.getRemoteAddr();
+ if (remoteAddr == null) {
+ write("" "");
+ } else {
+ write(remoteAddr);
+ }
+ write(nextColumnAlignCenter);
+ writeCountry(session);
+ if (displayUser) {
+ write(nextColumn);
+ final String remoteUser = session.getRemoteUser();
+ if (remoteUser == null) {
+ write("" "");
+ } else {
+ writeDirectly(htmlEncodeButNotSpace(remoteUser));
+ }
+ }
+ write("" "");
+ write(A_HREF_PART_SESSIONS);
+ write(""&action=invalidate_session&sessionId="");
+ write(urlEncode(session.getId()));
+ write(""' onclick=\""javascript:return confirm('""
+ + getStringForJavascript(""confirm_invalidate_session"") + ""');\"">"");
+ write("" "");
+ write("""");
+ write("" "");
+ }
+
+"
+1," public boolean isUseRouteBuilder() {
+ return false;
+ }
+
+ @Test
+"
+1," private T run(PrivilegedAction action) {
+ return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run();
+ }
+
+ // JAXB closes the underlying input stream
+"
+1," public void execute(FunctionContext context) {
+ RegionConfiguration configuration = (RegionConfiguration) context.getArguments();
+ if (this.cache.getLogger().fineEnabled()) {
+ StringBuilder builder = new StringBuilder();
+ builder.append(""Function "").append(ID).append("" received request: "").append(configuration);
+ this.cache.getLogger().fine(builder.toString());
+ }
+
+ // Create or retrieve region
+ RegionStatus status = createOrRetrieveRegion(configuration);
+
+ // Dump XML
+ if (DUMP_SESSION_CACHE_XML) {
+ writeCacheXml();
+ }
+ // Return status
+ context.getResultSender().lastResult(status);
+ }
+
+"
+1," public static int methodUrl(String path, ByteChunk out, int readTimeout,
+ Map<String, List<String>> reqHead,
+ Map<String, List<String>> resHead,
+ String method) throws IOException {
+
+ URL url = new URL(path);
+ HttpURLConnection connection =
+ (HttpURLConnection) url.openConnection();
+ connection.setUseCaches(false);
+ connection.setReadTimeout(readTimeout);
+ connection.setRequestMethod(method);
+ if (reqHead != null) {
+ for (Map.Entry<String, List<String>> entry : reqHead.entrySet()) {
+ StringBuilder valueList = new StringBuilder();
+ for (String value : entry.getValue()) {
+ if (valueList.length() > 0) {
+ valueList.append(',');
+ }
+ valueList.append(value);
+ }
+ connection.setRequestProperty(entry.getKey(),
+ valueList.toString());
+ }
+ }
+ connection.connect();
+ int rc = connection.getResponseCode();
+ if (resHead != null) {
+ Map<String, List<String>> head = connection.getHeaderFields();
+ resHead.putAll(head);
+ }
+ InputStream is;
+ if (rc < 400) {
+ is = connection.getInputStream();
+ } else {
+ is = connection.getErrorStream();
+ }
+ if (is != null) {
+ try (BufferedInputStream bis = new BufferedInputStream(is)) {
+ byte[] buf = new byte[2048];
+ int rd = 0;
+ while((rd = bis.read(buf)) > 0) {
+ out.append(buf, 0, rd);
+ }
+ }
+ }
+ return rc;
+ }
+
+"
+1," public void addRecipients(final ExtendedEmailPublisherContext context, final EnvVars env,
+ final Set<InternetAddress> to, final Set<InternetAddress> cc, final Set<InternetAddress> bcc) {
+
+ final class Debug implements RecipientProviderUtilities.IDebug {
+ private final ExtendedEmailPublisherDescriptor descriptor
+ = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class);
+
+ private final PrintStream logger = context.getListener().getLogger();
+
+ public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+
+ Set<User> users = null;
+
+ final Run<?, ?> currentRun = context.getRun();
+ if (currentRun == null) {
+ debug.send(""currentRun was null"");
+ } else {
+ final AbstractTestResultAction<?> testResultAction = currentRun.getAction(AbstractTestResultAction.class);
+ if (testResultAction == null) {
+ debug.send(""testResultAction was null"");
+ } else {
+ if (testResultAction.getFailCount() <= 0) {
+ debug.send(""getFailCount() returned <= 0"");
+ } else {
+ users = new HashSet<>();
+ debug.send(""Collecting builds where a test started failing..."");
+ final HashSet<Run<?, ?>> buildsWhereATestStartedFailing = new HashSet<>();
+ for (final TestResult caseResult : testResultAction.getFailedTests()) {
+ final Run<?, ?> runWhereTestStartedFailing = caseResult.getFailedSinceRun();
+ if (runWhereTestStartedFailing != null) {
+ debug.send("" runWhereTestStartedFailing: %d"", runWhereTestStartedFailing.getNumber());
+ buildsWhereATestStartedFailing.add(runWhereTestStartedFailing);
+ } else {
+ context.getListener().error(""getFailedSinceRun returned null for %s"", caseResult.getFullDisplayName());
+ }
+ }
+ // For each build where a test started failing, walk backward looking for build results worse than
+ // UNSTABLE. All of those builds will be used to find suspects.
+ debug.send(""Collecting builds with suspects..."");
+ final HashSet<Run<?, ?>> buildsWithSuspects = new HashSet<>();
+ for (final Run<?, ?> buildWhereATestStartedFailing : buildsWhereATestStartedFailing) {
+ debug.send("" buildWhereATestStartedFailing: %d"", buildWhereATestStartedFailing.getNumber());
+ buildsWithSuspects.add(buildWhereATestStartedFailing);
+ Run<?, ?> previousBuildToCheck = buildWhereATestStartedFailing.getPreviousCompletedBuild();
+ if (previousBuildToCheck != null) {
+ debug.send("" previousBuildToCheck: %d"", previousBuildToCheck.getNumber());
+ }
+ while (previousBuildToCheck != null) {
+ if (buildsWithSuspects.contains(previousBuildToCheck)) {
+ // Short-circuit if the build to check has already been checked.
+ debug.send("" already contained in buildsWithSuspects; stopping search"");
+ break;
+ }
+ final Result previousResult = previousBuildToCheck.getResult();
+ if (previousResult == null) {
+ debug.send("" previousResult was null"");
+ } else {
+ debug.send("" previousResult: %s"", previousResult.toString());
+ if (previousResult.isBetterThan(Result.FAILURE)) {
+ debug.send("" previousResult was better than FAILURE; stopping search"");
+ break;
+ } else {
+ debug.send("" previousResult was not better than FAILURE; adding to buildsWithSuspects; continuing search"");
+ buildsWithSuspects.add(previousBuildToCheck);
+ previousBuildToCheck = previousBuildToCheck.getPreviousCompletedBuild();
+ if (previousBuildToCheck != null) {
+ debug.send("" previousBuildToCheck: %d"", previousBuildToCheck.getNumber());
+ }
+ }
+ }
+ }
+ }
+ debug.send(""Collecting suspects..."");
+ users.addAll(RecipientProviderUtilities.getChangeSetAuthors(buildsWithSuspects, debug));
+ users.addAll(RecipientProviderUtilities.getUsersTriggeringTheBuilds(buildsWithSuspects, debug));
+ }
+ }
+ }
+
+ if (users != null) {
+ RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug);
+ }
+ }
+
+ @Extension
+"
+1," public static DateTimeZone randomDateTimeZone() {
+ DateTimeZone timeZone;
+
+ // Some Java time zones are unknown to Joda-Time (for example Asia/Riyadh88),
+ // so fall back to a known time zone in that case
+ try {
+ timeZone = DateTimeZone.forTimeZone(randomTimeZone());
+ } catch (IllegalArgumentException e) {
+ timeZone = DateTimeZone.forOffsetHours(randomIntBetween(-12, 12));
+ }
+
+ return timeZone;
+ }
+
+"
+1," public void testTriggerWithLockedDownInstance() throws Exception {
+ FreeStyleProject project = DuplicatesUtil.createGerritTriggeredJob(j, projectName);
+
+ Setup.lockDown(j);
+
+ GerritTrigger trigger = project.getTrigger(GerritTrigger.class);
+ trigger.setSilentStartMode(false);
+
+ GerritServer gerritServer = new GerritServer(PluginImpl.DEFAULT_SERVER_NAME);
+ SshdServerMock.configureFor(sshd, gerritServer);
+ PluginImpl.getInstance().addServer(gerritServer);
+ gerritServer.getConfig().setNumberOfSendingWorkerThreads(NUMBEROFSENDERTHREADS);
+ ((Config)gerritServer.getConfig()).setGerritAuthKeyFile(sshKey.getPrivateKey());
+ gerritServer.start();
+
+ gerritServer.triggerEvent(Setup.createPatchsetCreated());
+
+ TestUtils.waitForBuilds(project, 1);
+ //wait until command is registered
+ // CS IGNORE MagicNumber FOR NEXT 2 LINES. REASON: ConstantsNotNeeded
+ Thread.sleep(TimeUnit.SECONDS.toMillis(10));
+ assertEquals(2, server.getNrCommandsHistory(""gerrit review.*""));
+
+ FreeStyleBuild buildOne = project.getLastCompletedBuild();
+ assertSame(Result.SUCCESS, buildOne.getResult());
+ assertEquals(1, project.getLastCompletedBuild().getNumber());
+ assertSame(PluginImpl.DEFAULT_SERVER_NAME,
+ buildOne.getCause(GerritCause.class).getEvent().getProvider().getName());
+
+ }
+"
+1," public static Transformer cloneTransformer() {
+ return (Transformer) INSTANCE;
+ }
+
+ /**
+ * Constructor.
+ */
+"
+1," public void setEnabled(boolean enabled);
+
+"
+1," public void testHttpSendStringAndReceiveJavaBody() throws Exception {
+ context.addRoutes(new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from(""jetty:http://localhost:{{port}}/myapp/myservice"")
+ .process(new Processor() {
+ public void process(Exchange exchange) throws Exception {
+ String body = exchange.getIn().getBody(String.class);
+ assertNotNull(body);
+ assertEquals(""Hello World"", body);
+
+ MyCoolBean reply = new MyCoolBean(456, ""Camel rocks"");
+ exchange.getOut().setBody(reply);
+ exchange.getOut().setHeader(Exchange.CONTENT_TYPE, HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT);
+ }
+ });
+ }
+ });
+ context.start();
+
+ MyCoolBean reply = template.requestBody(""http://localhost:{{port}}/myapp/myservice"", ""Hello World"", MyCoolBean.class);
+
+ assertEquals(456, reply.getId());
+ assertEquals(""Camel rocks"", reply.getName());
+ }
+
+"
+1," private boolean evaluate(String text) {
+ try {
+ InputSource inputSource = new InputSource(new StringReader(text));
+ return ((Boolean)expression.evaluate(inputSource, XPathConstants.BOOLEAN)).booleanValue();
+ } catch (XPathExpressionException e) {
+ return false;
+ }
+ }
+
+ @Override
+"
+1," protected boolean isAccepted(String paramName) {
+ if (!this.acceptParams.isEmpty()) {
+ for (Pattern pattern : acceptParams) {
+ Matcher matcher = pattern.matcher(paramName);
+ if (matcher.matches()) {
+ return true;
+ }
+ }
+ return false;
+ } else
+ return acceptedPattern.matcher(paramName).matches();
+ }
+
+"
+1," public void loadConfig(Class>... configs) {
+ this.context = new AnnotationConfigWebApplicationContext();
+ this.context.register(configs);
+ this.context.refresh();
+
+ this.context.getAutowireCapableBeanFactory().autowireBean(this);
+ }
+"
+1," private CoderResult decodeHasArray(ByteBuffer in, CharBuffer out) {
+ int outRemaining = out.remaining();
+ int pos = in.position();
+ int limit = in.limit();
+ final byte[] bArr = in.array();
+ final char[] cArr = out.array();
+ final int inIndexLimit = limit + in.arrayOffset();
+ int inIndex = pos + in.arrayOffset();
+ int outIndex = out.position() + out.arrayOffset();
+ // if the limit is changed while decoding is in progress,
+ // the behavior is undefined
+ for (; inIndex < inIndexLimit && outRemaining > 0; inIndex++) {
+ int jchar = bArr[inIndex];
+ if (jchar < 0) {
+ jchar = jchar & 0x7F;
+ int tail = remainingBytes[jchar];
+ if (tail == -1) {
+ in.position(inIndex - in.arrayOffset());
+ out.position(outIndex - out.arrayOffset());
+ return CoderResult.malformedForLength(1);
+ }
+ if (inIndexLimit - inIndex < 1 + tail) {
+ // Apache Tomcat added test - detects invalid sequence as
+ // early as possible
+ if (jchar == 0x74 && inIndexLimit > inIndex + 1) {
+ if ((bArr[inIndex + 1] & 0xFF) > 0x8F) {
+ return CoderResult.unmappableForLength(4);
+ }
+ }
+ break;
+ }
+ for (int i = 0; i < tail; i++) {
+ int nextByte = bArr[inIndex + i + 1] & 0xFF;
+ if ((nextByte & 0xC0) != 0x80) {
+ in.position(inIndex - in.arrayOffset());
+ out.position(outIndex - out.arrayOffset());
+ return CoderResult.malformedForLength(1 + i);
+ }
+ jchar = (jchar << 6) + nextByte;
+ }
+ jchar -= remainingNumbers[tail];
+ if (jchar < lowerEncodingLimit[tail]) {
+ // Should have been encoded in fewer octets
+ in.position(inIndex - in.arrayOffset());
+ out.position(outIndex - out.arrayOffset());
+ return CoderResult.malformedForLength(1);
+ }
+ inIndex += tail;
+ }
+ // Apache Tomcat added test
+ if (jchar >= 0xD800 && jchar <= 0xDFFF) {
+ return CoderResult.unmappableForLength(3);
+ }
+ // Apache Tomcat added test
+ if (jchar > 0x10FFFF) {
+ return CoderResult.unmappableForLength(4);
+ }
+ if (jchar <= 0xffff) {
+ cArr[outIndex++] = (char) jchar;
+ outRemaining--;
+ } else {
+ if (outRemaining < 2) {
+ return CoderResult.OVERFLOW;
+ }
+ cArr[outIndex++] = (char) ((jchar >> 0xA) + 0xD7C0);
+ cArr[outIndex++] = (char) ((jchar & 0x3FF) + 0xDC00);
+ outRemaining -= 2;
+ }
+ }
+ in.position(inIndex - in.arrayOffset());
+ out.position(outIndex - out.arrayOffset());
+ return (outRemaining == 0 && inIndex < inIndexLimit) ? CoderResult.OVERFLOW
+ : CoderResult.UNDERFLOW;
+ }
+"
+1," public void testBogusPathCheck() {
+ TesseractOCRConfig config = new TesseractOCRConfig();
+ config.setTesseractPath(""blahdeblahblah"");
+ assertEquals(""blahdeblahblah"", config.getTesseractPath());
+ }
+
+
+"
+1," public void setValues(PreparedStatement ps) throws SQLException {
+ Timestamp t = new Timestamp(new Date().getTime());
+ ps.setTimestamp(1, t);
+ ps.setString(2, encNewPassword);
+ ps.setTimestamp(3, t);
+ ps.setString(4, id);
+ }
+ });
+"
+1," public boolean isTransferException() {
+ return transferException;
+ }
+
+ /**
+ * If enabled and an Exchange failed processing on the consumer side, the caused Exception
+ * will be sent back serialized in the response as an application/x-java-serialized-object content type.
+ */
+"
+1," public void testSingletonPatternInSerialization() {
+ final Object[] singletones = new Object[] {
+ CloneTransformer.INSTANCE,
+ ExceptionTransformer.INSTANCE,
+ NOPTransformer.INSTANCE,
+ StringValueTransformer.stringValueTransformer(),
+ };
+
+ for (final Object original : singletones) {
+ TestUtils.assertSameAfterSerialization(""Singleton pattern broken for "" + original.getClass(), original);
+ }
+ }
+
+"
+1," public static void main(
+ String[] args)
+ {
+ Security.addProvider(new BouncyCastleProvider());
+
+ runTest(new ECDSA5Test());
+ }
+"
+1," public synchronized void afterTest() throws IOException {
+ wipeDataDirectories();
+ randomlyResetClients(); /* reset all clients - each test gets its own client based on the Random instance created above. */
+ }
+
+ @Override
+"
+1," public void init(FilterConfig conf) throws ServletException {
+ if (conf != null && ""zookeeper"".equals(conf.getInitParameter(""signer.secret.provider""))) {
+ SolrZkClient zkClient =
+ (SolrZkClient)conf.getServletContext().getAttribute(DELEGATION_TOKEN_ZK_CLIENT);
+ conf.getServletContext().setAttribute(""signer.secret.provider.zookeeper.curator.client"",
+ getCuratorClient(zkClient));
+ }
+ super.init(conf);
+ }
+
+ @Override
+"
+1," protected void setAuthenticateHeader(HttpServletRequest request,
+ HttpServletResponse response,
+ LoginConfig config,
+ String nOnce) {
+
+ // Get the realm name
+ String realmName = config.getRealmName();
+ if (realmName == null)
+ realmName = REALM_NAME;
+
+ byte[] buffer = null;
+ synchronized (md5Helper) {
+ buffer = md5Helper.digest(nOnce.getBytes());
+ }
+
+ String authenticateHeader = ""Digest realm=\"""" + realmName + ""\"", ""
+ + ""qop=\""auth\"", nonce=\"""" + nOnce + ""\"", "" + ""opaque=\""""
+ + md5Encoder.encode(buffer) + ""\"""";
+ response.setHeader(AUTH_HEADER_NAME, authenticateHeader);
+
+ }
+
+
+"
+1," public static Object deserialize(byte[] data)
+ throws IOException, ClassNotFoundException
+ {
+ ByteArrayInputStream in = new ByteArrayInputStream(data);
+ ObjectInputStream is = new ObjectInputStream(in);
+ return is.readObject();
+ }
+
+"
+1," public FormValidation doCheckCommand(@QueryParameter String value) {
+ if(Util.fixEmptyAndTrim(value)==null)
+ return FormValidation.error(Messages.CommandLauncher_NoLaunchCommand());
+ else
+ return FormValidation.ok();
+ }
+ }
+}
+"
+1," private static void addUserTriggeringTheBuild(Run, ?> run, Set to,
+ Set cc, Set bcc, EnvVars env, TaskListener listener, RecipientProviderUtilities.IDebug debug) {
+
+ final User user = RecipientProviderUtilities.getUserTriggeringTheBuild(run);
+ if (user != null) {
+ RecipientProviderUtilities.addUsers(Collections.singleton(user), listener, env, to, cc, bcc, debug);
+ }
+ }
+
+ @SuppressWarnings(""unchecked"")
+
+
+ @Extension
+"
+1," protected void finish() throws IOException {
+
+ if (!response.isCommitted()) {
+ // Validate and write response headers
+ try {
+ prepareResponse();
+ } catch (IOException e) {
+ // Set error flag
+ error = true;
+ }
+ }
+
+ if (finished)
+ return;
+
+ finished = true;
+
+ // Add the end message
+ if (error) {
+ output(endAndCloseMessageArray, 0, endAndCloseMessageArray.length);
+ } else {
+ output(endMessageArray, 0, endMessageArray.length);
+ }
+ }
+
+
+ // ------------------------------------- InputStreamInputBuffer Inner Class
+
+
+ /**
+ * This class is an input buffer which will read its data from an input
+ * stream.
+ */
+"
+1," abstract protected JDBCTableReader getTableReader(Connection connection, String tableName, ParseContext parseContext);
+
+"
+1," public void testEntityExpansionWReq() throws Exception {
+ String url = ""https://localhost:"" + getIdpHttpsPort() + ""/fediz-idp/federation?"";
+ url += ""wa=wsignin1.0"";
+ url += ""&whr=urn:org:apache:cxf:fediz:idp:realm-A"";
+ url += ""&wtrealm=urn:org:apache:cxf:fediz:fedizhelloworld"";
+ String wreply = ""https://localhost:"" + getRpHttpsPort() + ""/"" + getServletContextName() + ""/secure/fedservlet"";
+ url += ""&wreply="" + wreply;
+
+ InputStream is = this.getClass().getClassLoader().getResource(""entity_wreq.xml"").openStream();
+ String entity = IOUtils.toString(is, ""UTF-8"");
+ is.close();
+ String validWreq =
+ """"
+ + ""&m;http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.1#SAMLV2.0 ""
+ + "" "";
+
+ url += ""&wreq="" + URLEncoder.encode(entity + validWreq, ""UTF-8"");
+
+ String user = ""alice"";
+ String password = ""ecila"";
+
+ final WebClient webClient = new WebClient();
+ webClient.getOptions().setUseInsecureSSL(true);
+ webClient.getCredentialsProvider().setCredentials(
+ new AuthScope(""localhost"", Integer.parseInt(getIdpHttpsPort())),
+ new UsernamePasswordCredentials(user, password));
+
+ webClient.getOptions().setJavaScriptEnabled(false);
+ try {
+ webClient.getPage(url);
+ Assert.fail(""Failure expected on a bad wreq value"");
+ } catch (FailingHttpStatusCodeException ex) {
+ Assert.assertEquals(ex.getStatusCode(), 400);
+ }
+
+ webClient.close();
+ }
+
+ // Send a malformed wreq value
+ @org.junit.Test
+"
+1," public static void checkSlip(File parentFile, File file) throws IllegalArgumentException {
+ String parentCanonicalPath;
+ String canonicalPath;
+ try {
+ parentCanonicalPath = parentFile.getCanonicalPath();
+ canonicalPath = file.getCanonicalPath();
+ } catch (IOException e) {
+ throw new IORuntimeException(e);
+ }
+ if (false == canonicalPath.startsWith(parentCanonicalPath)) {
+ throw new IllegalArgumentException(""New file is outside of the parent dir: "" + file.getName());
+ }
+ }
+"
+1," public ExitCode runWithoutHelp(CommandRunnerParams params)
+ throws IOException, InterruptedException {
+
+ if (saveFilename != null && loadFilename != null) {
+ params.getConsole().printErrorText(""Can't use both --load and --save"");
+ return ExitCode.COMMANDLINE_ERROR;
+ }
+
+ if (saveFilename != null) {
+ invalidateChanges(params);
+ RemoteDaemonicParserState state = params.getParser().storeParserState(params.getCell());
+ try (FileOutputStream fos = new FileOutputStream(saveFilename);
+ ZipOutputStream zipos = new ZipOutputStream(fos)) {
+ zipos.putNextEntry(new ZipEntry(""parser_data""));
+ try (ObjectOutputStream oos = new ObjectOutputStream(zipos)) {
+ oos.writeObject(state);
+ }
+ }
+ } else if (loadFilename != null) {
+ try (FileInputStream fis = new FileInputStream(loadFilename);
+ ZipInputStream zipis = new ZipInputStream(fis)) {
+ ZipEntry entry = zipis.getNextEntry();
+ Preconditions.checkState(entry.getName().equals(""parser_data""));
+ try (ObjectInputStream ois = new ObjectInputStream(zipis)) {
+ RemoteDaemonicParserState state;
+ try {
+ state = (RemoteDaemonicParserState) ois.readObject();
+ } catch (ClassNotFoundException e) {
+ params.getConsole().printErrorText(""Invalid file format"");
+ return ExitCode.COMMANDLINE_ERROR;
+ }
+ params.getParser().restoreParserState(state, params.getCell());
+ }
+ }
+ invalidateChanges(params);
+
+ ParserConfig configView = params.getBuckConfig().getView(ParserConfig.class);
+ if (configView.isParserCacheMutationWarningEnabled()) {
+ params
+ .getConsole()
+ .printErrorText(
+ params
+ .getConsole()
+ .getAnsi()
+ .asWarningText(
+ ""WARNING: Buck injected a parser state that may not match the local state.""));
+ }
+ }
+
+ return ExitCode.SUCCESS;
+ }
+
+"
+1," public BeanDefinition createMatcher(String path, String method) {
+ if ((""/**"".equals(path) || ""**"".equals(path)) && method == null) {
+ return new RootBeanDefinition(AnyRequestMatcher.class);
+ }
+
+ BeanDefinitionBuilder matcherBldr = BeanDefinitionBuilder
+ .rootBeanDefinition(type);
+
+ matcherBldr.addConstructorArgValue(path);
+ matcherBldr.addConstructorArgValue(method);
+
+ if (this == ciRegex) {
+ matcherBldr.addConstructorArgValue(true);
+ }
+
+ return matcherBldr.getBeanDefinition();
+ }
+
+"
+1," public Collection parse(final InputStream file, final String moduleName)
+ throws InvocationTargetException {
+ try {
+ Digester digester = new Digester();
+ digester.setValidating(false);
+ digester.setClassLoader(LintParser.class.getClassLoader());
+
+ List issues = new ArrayList();
+ digester.push(issues);
+
+ String issueXPath = ""issues/issue"";
+ digester.addObjectCreate(issueXPath, LintIssue.class);
+ digester.addSetProperties(issueXPath);
+ digester.addSetNext(issueXPath, ""add"");
+
+ String locationXPath = issueXPath + ""/location"";
+ digester.addObjectCreate(locationXPath, Location.class);
+ digester.addSetProperties(locationXPath);
+ digester.addSetNext(locationXPath, ""addLocation"", Location.class.getName());
+
+ digester.parse(file);
+
+ return convert(issues, moduleName);
+ } catch (IOException exception) {
+ throw new InvocationTargetException(exception);
+ } catch (SAXException exception) {
+ throw new InvocationTargetException(exception);
+ }
+ }
+
+ /**
+ * Converts the Lint object structure to that of the analysis-core API.
+ *
+ * @param issues The parsed Lint issues.
+ * @param moduleName Name of the maven module, if any.
+ * @return A collection of the discovered issues.
+ */
+"
+1," public List removeServer(GerritServer s) {
+ servers.remove(s);
+ return servers;
+ }
+
+
+ /**
+ * Check whether the list of servers contains a GerritServer object of a specific name.
+ *
+ * @param serverName to check.
+ * @return whether the list contains a server with the given name.
+ */
+"
+1," public void configure() throws Exception {
+ from(""direct:start"")
+ .to(""xslt:org/apache/camel/component/xslt/transform.xsl"")
+ .multicast()
+ .beanRef(""testBean"")
+ .to(""mock:result"");
+ }
+ };
+ }
+
+ @Override
+"
+1," public void removeUser(String username) {
+
+ UserDatabase database = (UserDatabase) this.resource;
+ User user = database.findUser(username);
+ if (user == null) {
+ return;
+ }
+ try {
+ MBeanUtils.destroyMBean(user);
+ database.removeUser(user);
+ } catch (Exception e) {
+ IllegalArgumentException iae = new IllegalArgumentException
+ (""Exception destroying user "" + user + "" MBean"");
+ iae.initCause(e);
+ throw iae;
+ }
+
+ }
+
+
+"
+1," public void execute(FunctionContext context) {
+ RegionFunctionContext rfc = (RegionFunctionContext) context;
+ context.getResultSender().lastResult(rfc.getDataSet().size());
+ }
+
+"
+1," public void execute(FunctionContext context) {
+ Object[] arguments = (Object[]) context.getArguments();
+ String regionName = (String) arguments[0];
+ Set keys = (Set) arguments[1];
+ if (this.cache.getLogger().fineEnabled()) {
+ StringBuilder builder = new StringBuilder();
+ builder.append(""Function "").append(ID).append("" received request to touch "")
+ .append(regionName).append(""->"").append(keys);
+ this.cache.getLogger().fine(builder.toString());
+ }
+
+ // Retrieve the appropriate Region and value to update the lastAccessedTime
+ Region region = this.cache.getRegion(regionName);
+ if (region != null) {
+ region.getAll(keys);
+ }
+
+ // Return result to get around NPE in LocalResultCollectorImpl
+ context.getResultSender().lastResult(true);
+ }
+
+"
+1," protected Object extractResponseBody(Exchange exchange, JettyContentExchange httpExchange) throws IOException {
+ Map<String, String> headers = getSimpleMap(httpExchange.getResponseHeaders());
+ String contentType = headers.get(Exchange.CONTENT_TYPE);
+
+ // if content type is serialized java object, then de-serialize it to a Java object
+ if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) {
+ try {
+ InputStream is = exchange.getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, httpExchange.getResponseContentBytes());
+ return HttpHelper.deserializeJavaObjectFromStream(is, exchange.getContext());
+ } catch (Exception e) {
+ throw new RuntimeCamelException(""Cannot deserialize body to Java object"", e);
+ }
+ } else {
+ // just grab the raw content body
+ return httpExchange.getBody();
+ }
+ }
+
+"
+1," public int doRead(ByteChunk chunk, Request req) throws IOException {
+
+ if (endChunk) {
+ return -1;
+ }
+
+ if(needCRLFParse) {
+ needCRLFParse = false;
+ parseCRLF(false);
+ }
+
+ if (remaining <= 0) {
+ if (!parseChunkHeader()) {
+ throw new IOException(""Invalid chunk header"");
+ }
+ if (endChunk) {
+ parseEndChunk();
+ return -1;
+ }
+ }
+
+ int result = 0;
+
+ if (pos >= lastValid) {
+ if (readBytes() < 0) {
+ throw new IOException(
+ ""Unexpected end of stream whilst reading request body"");
+ }
+ }
+
+ if (remaining > (lastValid - pos)) {
+ result = lastValid - pos;
+ remaining = remaining - result;
+ chunk.setBytes(buf, pos, result);
+ pos = lastValid;
+ } else {
+ result = remaining;
+ chunk.setBytes(buf, pos, remaining);
+ pos = pos + remaining;
+ remaining = 0;
+ //we need a CRLF
+ if ((pos+1) >= lastValid) {
+ //if we call parseCRLF we overrun the buffer here
+ //so we defer it to the next call BZ 11117
+ needCRLFParse = true;
+ } else {
+ parseCRLF(false); //parse the CRLF immediately
+ }
+ }
+
+ return result;
+ }
+
+
+ // ---------------------------------------------------- InputFilter Methods
+
+ /**
+ * Read the content length from the request.
+ */
+ @Override
+"
+1," public FormValidation doNameFreeCheck(
+ @QueryParameter(""value"")
+ final String value) {
+ if (!value.equals(name)) {
+ if (PluginImpl.containsServer_(value)) {
+ return FormValidation.error(""The server name "" + value + "" is already in use!"");
+ } else if (ANY_SERVER.equals(value)) {
+ return FormValidation.error(""Illegal name "" + value + ""!"");
+ } else {
+ return FormValidation.warning(""The server "" + name + "" will be renamed"");
+ }
+ } else {
+ return FormValidation.ok();
+ }
+ }
+
+ /**
+ * Generates a list of helper objects for the jelly view.
+ *
+ * @return a list of helper objects.
+ */
+"
+1," public void testPrivateKeyParsingSHA256()
+ throws IOException, ClassNotFoundException
+ {
+ XMSSMTParameters params = new XMSSMTParameters(20, 10, new SHA256Digest());
+ XMSSMT mt = new XMSSMT(params, new SecureRandom());
+ mt.generateKeys();
+ byte[] privateKey = mt.exportPrivateKey();
+ byte[] publicKey = mt.exportPublicKey();
+
+ mt.importState(privateKey, publicKey);
+
+ assertTrue(Arrays.areEqual(privateKey, mt.exportPrivateKey()));
+ }
+"
+1," public void addOtherTesseractConfig(String key, String value) {
+ if (key == null) {
+ throw new IllegalArgumentException(""key must not be null"");
+ }
+ if (value == null) {
+ throw new IllegalArgumentException(""value must not be null"");
+ }
+
+ Matcher m = ALLOWABLE_OTHER_PARAMS_PATTERN.matcher(key);
+ if (! m.find()) {
+ throw new IllegalArgumentException(""Value contains illegal characters: ""+key);
+ }
+ m.reset(value);
+ if (! m.find()) {
+ throw new IllegalArgumentException(""Value contains illegal characters: ""+value);
+ }
+
+ otherTesseractConfig.put(key.trim(), value.trim());
+ }
+
+ /**
+ * Get property from the properties file passed in.
+ *
+ * @param properties properties file to read from.
+ * @param property the property to fetch.
+ * @param defaultMissing default parameter to use.
+ * @return the value.
+ */
+"
+1," public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws Exception {
+ final String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType());
+
+ InputStream is = null;
+ XMLStreamReader parser = null;
+
+ String tr = req.getParams().get(CommonParams.TR,null);
+ if(tr!=null) {
+ Transformer t = getTransformer(tr,req);
+ final DOMResult result = new DOMResult();
+
+ // first step: read XML and build DOM using Transformer (this is no overhead, as XSL always produces
+ // an internal result DOM tree, we just access it directly as input for StAX):
+ try {
+ is = stream.getStream();
+ final InputSource isrc = new InputSource(is);
+ isrc.setEncoding(charset);
+ final SAXSource source = new SAXSource(isrc);
+ t.transform(source, result);
+ } catch(TransformerException te) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, te.getMessage(), te);
+ } finally {
+ IOUtils.closeQuietly(is);
+ }
+ // second step: feed the intermediate DOM tree into the StAX parser:
+ try {
+ parser = inputFactory.createXMLStreamReader(new DOMSource(result.getNode()));
+ this.processUpdate(req, processor, parser);
+ } catch (XMLStreamException e) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
+ } finally {
+ if (parser != null) parser.close();
+ }
+ }
+ // Normal XML Loader
+ else {
+ try {
+ is = stream.getStream();
+ if (UpdateRequestHandler.log.isTraceEnabled()) {
+ final byte[] body = IOUtils.toByteArray(is);
+ // TODO: The charset may be wrong, as the real charset is later
+ // determined by the XML parser; the content-type is only used as a hint!
+ UpdateRequestHandler.log.trace(""body"", new String(body, (charset == null) ?
+ ContentStreamBase.DEFAULT_CHARSET : charset));
+ IOUtils.closeQuietly(is);
+ is = new ByteArrayInputStream(body);
+ }
+ parser = (charset == null) ?
+ inputFactory.createXMLStreamReader(is) : inputFactory.createXMLStreamReader(is, charset);
+ this.processUpdate(req, processor, parser);
+ } catch (XMLStreamException e) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
+ } finally {
+ if (parser != null) parser.close();
+ IOUtils.closeQuietly(is);
+ }
+ }
+ }
+
+
+ /** Get Transformer from request context, or from TransformerProvider.
+ * This allows either getContentType(...) or write(...) to instantiate the Transformer,
+ * depending on which one is called first; the other one then reuses the same Transformer.
+ */
+"
+1," private SSLEngine createSSLEngine(Map userProperties)
+ throws DeploymentException {
+
+ try {
+ // See if a custom SSLContext has been provided
+ SSLContext sslContext =
+ (SSLContext) userProperties.get(Constants.SSL_CONTEXT_PROPERTY);
+
+ if (sslContext == null) {
+ // Create the SSL Context
+ sslContext = SSLContext.getInstance(""TLS"");
+
+ // Trust store
+ String sslTrustStoreValue =
+ (String) userProperties.get(Constants.SSL_TRUSTSTORE_PROPERTY);
+ if (sslTrustStoreValue != null) {
+ String sslTrustStorePwdValue = (String) userProperties.get(
+ Constants.SSL_TRUSTSTORE_PWD_PROPERTY);
+ if (sslTrustStorePwdValue == null) {
+ sslTrustStorePwdValue = Constants.SSL_TRUSTSTORE_PWD_DEFAULT;
+ }
+
+ File keyStoreFile = new File(sslTrustStoreValue);
+ KeyStore ks = KeyStore.getInstance(""JKS"");
+ try (InputStream is = new FileInputStream(keyStoreFile)) {
+ ks.load(is, sslTrustStorePwdValue.toCharArray());
+ }
+
+ TrustManagerFactory tmf = TrustManagerFactory.getInstance(
+ TrustManagerFactory.getDefaultAlgorithm());
+ tmf.init(ks);
+
+ sslContext.init(null, tmf.getTrustManagers(), null);
+ } else {
+ sslContext.init(null, null, null);
+ }
+ }
+
+ SSLEngine engine = sslContext.createSSLEngine();
+
+ String sslProtocolsValue =
+ (String) userProperties.get(Constants.SSL_PROTOCOLS_PROPERTY);
+ if (sslProtocolsValue != null) {
+ engine.setEnabledProtocols(sslProtocolsValue.split("",""));
+ }
+
+ engine.setUseClientMode(true);
+
+ return engine;
+ } catch (Exception e) {
+ throw new DeploymentException(sm.getString(
+ ""wsWebSocketContainer.sslEngineFail""), e);
+ }
+ }
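+
+ // Illustrative usage only (not part of the original class): a caller would typically
+ // populate the user-properties map before requesting an engine, e.g.
+ //
+ //     Map<String, Object> props = new HashMap<>();
+ //     props.put(Constants.SSL_TRUSTSTORE_PROPERTY, ""/path/to/truststore.jks"");
+ //     props.put(Constants.SSL_TRUSTSTORE_PWD_PROPERTY, ""changeit"");
+ //     props.put(Constants.SSL_PROTOCOLS_PROPERTY, ""TLSv1.2"");
+ //     SSLEngine engine = createSSLEngine(props);
+ //
+ // When no truststore is configured, sslContext.init(null, null, null) above falls back
+ // to the JVM's default trust managers.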
+
+
+ @Override
+"
+1," private AsymmetricCipherKeyPair genKeyPair()
+ {
+ if (!initialized)
+ {
+ initializeDefault();
+ }
+
+ // initialize authenticationPaths and treehash instances
+ byte[][][] currentAuthPaths = new byte[numLayer][][];
+ byte[][][] nextAuthPaths = new byte[numLayer - 1][][];
+ Treehash[][] currentTreehash = new Treehash[numLayer][];
+ Treehash[][] nextTreehash = new Treehash[numLayer - 1][];
+
+ Vector[] currentStack = new Vector[numLayer];
+ Vector[] nextStack = new Vector[numLayer - 1];
+
+ Vector[][] currentRetain = new Vector[numLayer][];
+ Vector[][] nextRetain = new Vector[numLayer - 1][];
+
+ for (int i = 0; i < numLayer; i++)
+ {
+ currentAuthPaths[i] = new byte[heightOfTrees[i]][mdLength];
+ currentTreehash[i] = new Treehash[heightOfTrees[i] - K[i]];
+
+ if (i > 0)
+ {
+ nextAuthPaths[i - 1] = new byte[heightOfTrees[i]][mdLength];
+ nextTreehash[i - 1] = new Treehash[heightOfTrees[i] - K[i]];
+ }
+
+ currentStack[i] = new Vector();
+ if (i > 0)
+ {
+ nextStack[i - 1] = new Vector();
+ }
+ }
+
+ // initialize roots
+ byte[][] currentRoots = new byte[numLayer][mdLength];
+ byte[][] nextRoots = new byte[numLayer - 1][mdLength];
+ // initialize seeds
+ byte[][] seeds = new byte[numLayer][mdLength];
+ // initialize seeds[] by copying starting-seeds of first trees of each
+ // layer
+ for (int i = 0; i < numLayer; i++)
+ {
+ System.arraycopy(currentSeeds[i], 0, seeds[i], 0, mdLength);
+ }
+
+ // initialize rootSigs
+ currentRootSigs = new byte[numLayer - 1][mdLength];
+
+ // -------------------------
+ // -------------------------
+ // --- calculation of current authpaths and current rootsigs (AUTHPATHS,
+ // SIG)------
+ // from bottom up to the root
+ for (int h = numLayer - 1; h >= 0; h--)
+ {
+ GMSSRootCalc tree = new GMSSRootCalc(this.heightOfTrees[h], this.K[h], digestProvider);
+ try
+ {
+ // on lowest layer no lower root is available, so just call
+ // the method with null as first parameter
+ if (h == numLayer - 1)
+ {
+ tree = this.generateCurrentAuthpathAndRoot(null, currentStack[h], seeds[h], h);
+ }
+ else
+ // otherwise call the method with the former computed root
+ // value
+ {
+ tree = this.generateCurrentAuthpathAndRoot(currentRoots[h + 1], currentStack[h], seeds[h], h);
+ }
+
+ }
+ catch (Exception e1)
+ {
+ e1.printStackTrace();
+ }
+
+ // set initial values needed for the private key construction
+ for (int i = 0; i < heightOfTrees[h]; i++)
+ {
+ System.arraycopy(tree.getAuthPath()[i], 0, currentAuthPaths[h][i], 0, mdLength);
+ }
+ currentRetain[h] = tree.getRetain();
+ currentTreehash[h] = tree.getTreehash();
+ System.arraycopy(tree.getRoot(), 0, currentRoots[h], 0, mdLength);
+ }
+
+ // --- calculation of next authpaths and next roots (AUTHPATHS+, ROOTS+)
+ // ------
+ for (int h = numLayer - 2; h >= 0; h--)
+ {
+ GMSSRootCalc tree = this.generateNextAuthpathAndRoot(nextStack[h], seeds[h + 1], h + 1);
+
+ // set initial values needed for the private key construction
+ for (int i = 0; i < heightOfTrees[h + 1]; i++)
+ {
+ System.arraycopy(tree.getAuthPath()[i], 0, nextAuthPaths[h][i], 0, mdLength);
+ }
+ nextRetain[h] = tree.getRetain();
+ nextTreehash[h] = tree.getTreehash();
+ System.arraycopy(tree.getRoot(), 0, nextRoots[h], 0, mdLength);
+
+ // create seed for the Merkle tree after next (nextNextSeeds)
+ // SEEDs++
+ System.arraycopy(seeds[h + 1], 0, this.nextNextSeeds[h], 0, mdLength);
+ }
+ // ------------
+
+ // generate JDKGMSSPublicKey
+ GMSSPublicKeyParameters publicKey = new GMSSPublicKeyParameters(currentRoots[0], gmssPS);
+
+ // generate the JDKGMSSPrivateKey
+ GMSSPrivateKeyParameters privateKey = new GMSSPrivateKeyParameters(currentSeeds, nextNextSeeds, currentAuthPaths,
+ nextAuthPaths, currentTreehash, nextTreehash, currentStack, nextStack, currentRetain, nextRetain, nextRoots, currentRootSigs, gmssPS, digestProvider);
+
+ // return the KeyPair
+ return (new AsymmetricCipherKeyPair(publicKey, privateKey));
+ }
+
+ /**
+ * Calculates the authpath for the tree in layer h which starts with seed[h];
+ * additionally computes the rootSignature of the underlying root.
+ *
+ * @param currentStack stack used for the treehash instance created by this method
+ * @param lowerRoot stores the root of the lower tree
+ * @param seed starting seeds
+ * @param h actual layer
+ */
+"
+1," private void sendEntityMessage(Object message) throws Exception {
+
+ MockEndpoint endpoint = getMockEndpoint(""mock:result"");
+ endpoint.reset();
+ endpoint.expectedMessageCount(1);
+
+ template.sendBody(""direct:start1"", message);
+
+ assertMockEndpointsSatisfied();
+
+ List<Exchange> list = endpoint.getReceivedExchanges();
+ Exchange exchange = list.get(0);
+ String xml = exchange.getIn().getBody(String.class);
+ assertTrue(""Get a wrong transformed message"", xml.indexOf("""") > 0);
+
+
+
+ try {
+ template.sendBody(""direct:start2"", message);
+ fail(""Expect an exception here"");
+ } catch (Exception ex) {
+ // expect an exception here
+ assertTrue(""Get a wrong exception"", ex instanceof CamelExecutionException);
+ // the file could not be found
+ assertTrue(""Get a wrong exception cause"", ex.getCause() instanceof TransformerException);
+ }
+
+ }
+
+
+ @Override
+"
+1," public static String getContextPath(HttpServletRequest request) {
+ String contextPath = (String) request.getAttribute(INCLUDE_CONTEXT_PATH_ATTRIBUTE);
+ if (contextPath == null) {
+ contextPath = request.getContextPath();
+ }
+ if (""/"".equals(contextPath)) {
+ // Invalid case, but happens for includes on Jetty: silently adapt it.
+ contextPath = """";
+ }
+ return decodeRequestString(request, contextPath);
+ }
+
+ /**
+ * Find the Shiro {@link WebEnvironment} for this web application, which is typically loaded via the
+ * {@link org.apache.shiro.web.env.EnvironmentLoaderListener}.
+ *
+ * This implementation rethrows an exception that happened on environment startup to differentiate between a failed
+ * environment startup and no environment at all.
+ *
+ * @param sc ServletContext to find the web application context for
+ * @return the root WebApplicationContext for this web app
+ * @throws IllegalStateException if the root WebApplicationContext could not be found
+ * @see org.apache.shiro.web.env.EnvironmentLoader#ENVIRONMENT_ATTRIBUTE_KEY
+ * @since 1.2
+ */
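+
+ // The method body is not included in this record. Rough sketch only, assuming (as the
+ // @see above suggests) that the environment, or the Throwable raised during startup, is
+ // stored as a ServletContext attribute under EnvironmentLoader.ENVIRONMENT_ATTRIBUTE_KEY:
+ //
+ //     Object attr = sc.getAttribute(EnvironmentLoader.ENVIRONMENT_ATTRIBUTE_KEY);
+ //     if (attr instanceof RuntimeException) { throw (RuntimeException) attr; }
+ //     if (attr instanceof Error) { throw (Error) attr; }
+ //     if (attr != null && !(attr instanceof WebEnvironment)) {
+ //         throw new IllegalStateException(""Attribute is not of type WebEnvironment: "" + attr);
+ //     }
+ //     if (attr == null) {
+ //         throw new IllegalStateException(""No WebEnvironment found: no EnvironmentLoaderListener registered?"");
+ //     }
+ //     return (WebEnvironment) attr;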
+"
+1," private void doTestRewrite(String config, String request, String expectedURI) throws Exception {
+ Tomcat tomcat = getTomcatInstance();
+
+ // No file system docBase required
+ Context ctx = tomcat.addContext("""", null);
+
+ RewriteValve rewriteValve = new RewriteValve();
+ ctx.getPipeline().addValve(rewriteValve);
+
+ rewriteValve.setConfiguration(config);
+
+ // Note: URLPatterns should be URL encoded
+ // (http://svn.apache.org/r285186)
+ Tomcat.addServlet(ctx, ""snoop"", new SnoopServlet());
+ ctx.addServletMapping(""/a/%255A"", ""snoop"");
+ ctx.addServletMapping(""/c/*"", ""snoop"");
+
+ tomcat.start();
+
+ ByteChunk res = getUrl(""http://localhost:"" + getPort() + request);
+
+ String body = res.toString();
+ RequestDescriptor requestDesc = SnoopResult.parse(body);
+ String requestURI = requestDesc.getRequestInfo(""REQUEST-URI"");
+ Assert.assertEquals(expectedURI, requestURI);
+ }
+"
+1," public Result isAllowed(String principalId) {
+ LockoutPolicy lockoutPolicy = lockoutPolicyRetriever.getLockoutPolicy();
+
+ if (!lockoutPolicy.isLockoutEnabled()) {
+ return new Result(true, 0);
+ }
+
+ long eventsAfter = timeService.getCurrentTimeMillis() - lockoutPolicy.getCountFailuresWithin() * 1000;
+ List<AuditEvent> events = auditService.find(principalId, eventsAfter);
+
+ final int failureCount = sequentialFailureCount(events);
+
+ if (failureCount >= lockoutPolicy.getLockoutAfterFailures()) {
+ // Check whether time of most recent failure is within the lockout period
+ AuditEvent lastFailure = mostRecentFailure(events);
+ if (lastFailure != null && lastFailure.getTime() > timeService.getCurrentTimeMillis() - lockoutPolicy.getLockoutPeriodSeconds() * 1000) {
+ return new Result(false, failureCount);
+ }
+ }
+ return new Result(true, failureCount);
+ }
+
+ /**
+ * Counts the number of failures that occurred without an intervening
+ * successful login.
+ */
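+
+ // sequentialFailureCount(...) and mostRecentFailure(...) are not included in this record.
+ // Illustration only, assuming the audit events are ordered newest-first: the sequential
+ // failure count would stop at the most recent successful authentication, roughly like:
+ //
+ //     int count = 0;
+ //     for (AuditEvent event : events) {
+ //         if (event.getType() == AuditEventType.UserAuthenticationFailure) {
+ //             count++;
+ //         } else if (event.getType() == AuditEventType.UserAuthenticationSuccess) {
+ //             break;
+ //         }
+ //     }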
+"
+1," public void save() throws Exception {
+
+ if (getReadonly()) {
+ log.error(sm.getString(""memoryUserDatabase.readOnly""));
+ return;
+ }
+
+ if (!isWriteable()) {
+ log.warn(sm.getString(""memoryUserDatabase.notPersistable""));
+ return;
+ }
+
+ // Write out contents to a temporary file
+ File fileNew = new File(pathnameNew);
+ if (!fileNew.isAbsolute()) {
+ fileNew =
+ new File(System.getProperty(Globals.CATALINA_BASE_PROP), pathnameNew);
+ }
+ PrintWriter writer = null;
+ try {
+
+ // Configure our PrintWriter
+ FileOutputStream fos = new FileOutputStream(fileNew);
+ OutputStreamWriter osw = new OutputStreamWriter(fos, ""UTF8"");
+ writer = new PrintWriter(osw);
+
+ // Print the file prolog
+ writer.println("""");
+ writer.println("""");
+
+ // Print entries for each defined role, group, and user
+ Iterator<?> values = null;
+ values = getRoles();
+ while (values.hasNext()) {
+ writer.print("" "");
+ writer.println(values.next());
+ }
+ values = getGroups();
+ while (values.hasNext()) {
+ writer.print("" "");
+ writer.println(values.next());
+ }
+ values = getUsers();
+ while (values.hasNext()) {
+ writer.print("" "");
+ writer.println(values.next());
+ }
+
+ // Print the file epilog
+ writer.println("" "");
+
+ // Check for errors that occurred while printing
+ if (writer.checkError()) {
+ writer.close();
+ fileNew.delete();
+ throw new IOException
+ (sm.getString(""memoryUserDatabase.writeException"",
+ fileNew.getAbsolutePath()));
+ }
+ writer.close();
+ } catch (IOException e) {
+ if (writer != null) {
+ writer.close();
+ }
+ fileNew.delete();
+ throw e;
+ }
+
+ // Perform the required renames to permanently save this file
+ File fileOld = new File(pathnameOld);
+ if (!fileOld.isAbsolute()) {
+ fileOld =
+ new File(System.getProperty(Globals.CATALINA_BASE_PROP), pathnameOld);
+ }
+ fileOld.delete();
+ File fileOrig = new File(pathname);
+ if (!fileOrig.isAbsolute()) {
+ fileOrig =
+ new File(System.getProperty(Globals.CATALINA_BASE_PROP), pathname);
+ }
+ if (fileOrig.exists()) {
+ fileOld.delete();
+ if (!fileOrig.renameTo(fileOld)) {
+ throw new IOException
+ (sm.getString(""memoryUserDatabase.renameOld"",
+ fileOld.getAbsolutePath()));
+ }
+ }
+ if (!fileNew.renameTo(fileOrig)) {
+ if (fileOld.exists()) {
+ fileOld.renameTo(fileOrig);
+ }
+ throw new IOException
+ (sm.getString(""memoryUserDatabase.renameNew"",
+ fileOrig.getAbsolutePath()));
+ }
+ fileOld.delete();
+
+ }
+
+
+ /**
+ * Return a String representation of this UserDatabase.
+ */
+ @Override
+"
+1," public static boolean configSuccess(HierarchicalConfiguration reply) {
+ if (reply != null) {
+ if (reply.containsKey(""ok"")) {
+ return true;
+ }
+ }
+ return false;
+ }
+"
+1," public void process(Exchange exchange) throws Exception {
+"
+1," public boolean createDB(String dbName, PortletRequest request) {
+ Connection conn = null;
+ try {
+ conn = DerbyConnectionUtil.getDerbyConnection(dbName,
+ DerbyConnectionUtil.CREATE_DB_PROP);
+ portlet.addInfoMessage(request, portlet.getLocalizedString(request, ""sysdb.infoMsg01"", dbName));
+ return true;
+ } catch (Throwable e) {
+ portlet.addErrorMessage(request, portlet.getLocalizedString(request, ""sysdb.errorMsg01""), e.getMessage());
+ return false;
+ } finally {
+ // close DB connection
+ try {
+ if (conn != null) {
+ conn.close();
+ }
+ } catch (SQLException e) {
+ portlet.addErrorMessage(request, portlet.getLocalizedString(request, ""sysdb.errorMsg02""), e.getMessage());
+ }
+ }
+ }
+
+"
+1," public UnixUser authenticate(String username, String password) throws PAMException {
+ this.password = password;
+ try {
+ check(libpam.pam_set_item(pht,PAM_USER,username),""pam_set_item failed"");
+ check(libpam.pam_authenticate(pht,0),""pam_authenticate failed"");
+ check(libpam.pam_setcred(pht,0),""pam_setcred failed"");
+ // several different error codes seem to be used to represent authentication failures
+// check(libpam.pam_acct_mgmt(pht,0),""pam_acct_mgmt failed"");
+
+ PointerByReference r = new PointerByReference();
+ check(libpam.pam_get_item(pht,PAM_USER,r),""pam_get_item failed"");
+ String userName = r.getValue().getString(0);
+ passwd pwd = libc.getpwnam(userName);
+ if(pwd==null)
+ throw new PAMException(""Authentication succeeded but no user information is available"");
+ return new UnixUser(userName,pwd);
+ } finally {
+ this.password = null;
+ }
+ }
+
+ /**
+ * Returns the groups a user belongs to
+ * @param username
+ * @return Set of group names
+ * @throws PAMException
+ * @deprecated
+ * Pointless and ugly convenience method.
+ */
+"
+1," public void serveImage(ResourceRequest req, ResourceResponse resp) throws IOException {
+ String fn = req.getRenderParameters().getValue(""fn"");
+ String ct = req.getRenderParameters().getValue(""ct"");
+
+ resp.setContentType(ct);
+
+ String path = req.getPortletContext().getRealPath(fn);
+ File file = new File(path);
+ OutputStream os = resp.getPortletOutputStream();
+ Files.copy(file.toPath(), os);
+ os.flush();
+ }
+"
+1," public Http11NioProcessor createProcessor() {
+ Http11NioProcessor processor = new Http11NioProcessor(
+ proto.getMaxHttpHeaderSize(), (NioEndpoint)proto.endpoint,
+ proto.getMaxTrailerSize());
+ processor.setAdapter(proto.getAdapter());
+ processor.setMaxKeepAliveRequests(proto.getMaxKeepAliveRequests());
+ processor.setKeepAliveTimeout(proto.getKeepAliveTimeout());
+ processor.setConnectionUploadTimeout(
+ proto.getConnectionUploadTimeout());
+ processor.setDisableUploadTimeout(proto.getDisableUploadTimeout());
+ processor.setCompressionMinSize(proto.getCompressionMinSize());
+ processor.setCompression(proto.getCompression());
+ processor.setNoCompressionUserAgents(proto.getNoCompressionUserAgents());
+ processor.setCompressableMimeTypes(proto.getCompressableMimeTypes());
+ processor.setRestrictedUserAgents(proto.getRestrictedUserAgents());
+ processor.setSocketBuffer(proto.getSocketBuffer());
+ processor.setMaxSavePostSize(proto.getMaxSavePostSize());
+ processor.setServer(proto.getServer());
+ register(processor);
+ return processor;
+ }
+
+ @Override
+"
+1," public Document getMetaData(Idp config, TrustedIdp serviceConfig) throws ProcessingException {
+
+ try {
+ Crypto crypto = CertsUtils.createCrypto(config.getCertificate());
+
+ ByteArrayOutputStream bout = new ByteArrayOutputStream(4096);
+ Writer streamWriter = new OutputStreamWriter(bout, ""UTF-8"");
+ XMLStreamWriter writer = XML_OUTPUT_FACTORY.createXMLStreamWriter(streamWriter);
+
+ writer.writeStartDocument(""UTF-8"", ""1.0"");
+
+ String referenceID = IDGenerator.generateID(""_"");
+ writer.writeStartElement(""md"", ""EntityDescriptor"", SAML2_METADATA_NS);
+ writer.writeAttribute(""ID"", referenceID);
+
+ String serviceURL = config.getIdpUrl().toString();
+ writer.writeAttribute(""entityID"", serviceURL);
+
+ writer.writeNamespace(""md"", SAML2_METADATA_NS);
+ writer.writeNamespace(""fed"", WS_FEDERATION_NS);
+ writer.writeNamespace(""wsa"", WS_ADDRESSING_NS);
+ writer.writeNamespace(""auth"", WS_FEDERATION_NS);
+ writer.writeNamespace(""xsi"", SCHEMA_INSTANCE_NS);
+
+ if (""http://docs.oasis-open.org/wsfed/federation/200706"".equals(serviceConfig.getProtocol())) {
+ writeFederationMetadata(writer, serviceConfig, serviceURL);
+ } else if (""urn:oasis:names:tc:SAML:2.0:profiles:SSO:browser"".equals(serviceConfig.getProtocol())) {
+ writeSAMLMetadata(writer, serviceConfig, serviceURL, crypto);
+ }
+
+ writer.writeEndElement(); // EntityDescriptor
+
+ writer.writeEndDocument();
+
+ streamWriter.flush();
+ bout.flush();
+ //
+
+ if (LOG.isDebugEnabled()) {
+ String out = new String(bout.toByteArray());
+ LOG.debug(""***************** unsigned ****************"");
+ LOG.debug(out);
+ LOG.debug(""***************** unsigned ****************"");
+ }
+
+ InputStream is = new ByteArrayInputStream(bout.toByteArray());
+
+ Document result = SignatureUtils.signMetaInfo(crypto, null, config.getCertificatePassword(), is, referenceID);
+ if (result != null) {
+ return result;
+ } else {
+ throw new RuntimeException(""Failed to sign the metadata document: result=null"");
+ }
+ } catch (ProcessingException e) {
+ throw e;
+ } catch (Exception e) {
+ LOG.error(""Error creating service metadata information "", e);
+ throw new ProcessingException(""Error creating service metadata information: "" + e.getMessage());
+ }
+
+ }
+
+"
+1," public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+
+ Set<User> users = null;
+
+ final Run<?, ?> currentRun = context.getRun();
+ if (currentRun == null) {
+ debug.send(""currentRun was null"");
+ } else {
+ final AbstractTestResultAction<?> testResultAction = currentRun.getAction(AbstractTestResultAction.class);
+ if (testResultAction == null) {
+ debug.send(""testResultAction was null"");
+ } else {
+ if (testResultAction.getFailCount() <= 0) {
+ debug.send(""getFailCount() returned <= 0"");
+ } else {
+ users = new HashSet<>();
+ debug.send(""Collecting builds where a test started failing..."");
+ final HashSet<Run<?, ?>> buildsWhereATestStartedFailing = new HashSet<>();
+ for (final TestResult caseResult : testResultAction.getFailedTests()) {
+ final Run<?, ?> runWhereTestStartedFailing = caseResult.getFailedSinceRun();
+ if (runWhereTestStartedFailing != null) {
+ debug.send("" runWhereTestStartedFailing: %d"", runWhereTestStartedFailing.getNumber());
+ buildsWhereATestStartedFailing.add(runWhereTestStartedFailing);
+ } else {
+ context.getListener().error(""getFailedSinceRun returned null for %s"", caseResult.getFullDisplayName());
+ }
+ }
+ // For each build where a test started failing, walk backward looking for build results worse than
+ // UNSTABLE. All of those builds will be used to find suspects.
+ debug.send(""Collecting builds with suspects..."");
+ final HashSet<Run<?, ?>> buildsWithSuspects = new HashSet<>();
+ for (final Run<?, ?> buildWhereATestStartedFailing : buildsWhereATestStartedFailing) {
+ debug.send("" buildWhereATestStartedFailing: %d"", buildWhereATestStartedFailing.getNumber());
+ buildsWithSuspects.add(buildWhereATestStartedFailing);
+ Run<?, ?> previousBuildToCheck = buildWhereATestStartedFailing.getPreviousCompletedBuild();
+ if (previousBuildToCheck != null) {
+ debug.send("" previousBuildToCheck: %d"", previousBuildToCheck.getNumber());
+ }
+ while (previousBuildToCheck != null) {
+ if (buildsWithSuspects.contains(previousBuildToCheck)) {
+ // Short-circuit if the build to check has already been checked.
+ debug.send("" already contained in buildsWithSuspects; stopping search"");
+ break;
+ }
+ final Result previousResult = previousBuildToCheck.getResult();
+ if (previousResult == null) {
+ debug.send("" previousResult was null"");
+ } else {
+ debug.send("" previousResult: %s"", previousResult.toString());
+ if (previousResult.isBetterThan(Result.FAILURE)) {
+ debug.send("" previousResult was better than FAILURE; stopping search"");
+ break;
+ } else {
+ debug.send("" previousResult was not better than FAILURE; adding to buildsWithSuspects; continuing search"");
+ buildsWithSuspects.add(previousBuildToCheck);
+ previousBuildToCheck = previousBuildToCheck.getPreviousCompletedBuild();
+ if (previousBuildToCheck != null) {
+ debug.send("" previousBuildToCheck: %d"", previousBuildToCheck.getNumber());
+ }
+ }
+ }
+ }
+ }
+ debug.send(""Collecting suspects..."");
+ users.addAll(RecipientProviderUtilities.getChangeSetAuthors(buildsWithSuspects, debug));
+ users.addAll(RecipientProviderUtilities.getUsersTriggeringTheBuilds(buildsWithSuspects, debug));
+ }
+ }
+ }
+
+ if (users != null) {
+ RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug);
+ }
+ }
+
+ @Extension
+ public static final class DescriptorImpl extends RecipientProviderDescriptor {
+ @Override
+ public String getDisplayName() {
+ return ""Suspects Causing Unit Tests to Begin Failing"";
+ }
+ }
+
+}
+"
+1," public SocketState process(SocketWrapper socket)
+ throws IOException {
+ RequestInfo rp = request.getRequestProcessor();
+ rp.setStage(org.apache.coyote.Constants.STAGE_PARSE);
+
+ // Setting up the socket
+ this.socket = socket;
+ long socketRef = socket.getSocket().longValue();
+ Socket.setrbb(socketRef, inputBuffer);
+ Socket.setsbb(socketRef, outputBuffer);
+
+ // Error flag
+ error = false;
+
+ boolean keptAlive = false;
+
+ while (!error && !endpoint.isPaused()) {
+
+ // Parsing the request header
+ try {
+ // Get first message of the request
+ if (!readMessage(requestHeaderMessage, true, keptAlive)) {
+ // This means that no data is available right now
+ // (long keepalive), so that the processor should be recycled
+ // and the method should return true
+ break;
+ }
+ // Check message type, process right away and break if
+ // not regular request processing
+ int type = requestHeaderMessage.getByte();
+ if (type == Constants.JK_AJP13_CPING_REQUEST) {
+ if (Socket.send(socketRef, pongMessageArray, 0,
+ pongMessageArray.length) < 0) {
+ error = true;
+ }
+ continue;
+ } else if(type != Constants.JK_AJP13_FORWARD_REQUEST) {
+ // Usually the servlet didn't read the previous request body
+ if(log.isDebugEnabled()) {
+ log.debug(""Unexpected message: ""+type);
+ }
+ continue;
+ }
+
+ keptAlive = true;
+ request.setStartTime(System.currentTimeMillis());
+ } catch (IOException e) {
+ error = true;
+ break;
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.debug(sm.getString(""ajpprocessor.header.error""), t);
+ // 400 - Bad Request
+ response.setStatus(400);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+
+ if (!error) {
+ // Setting up filters and parsing some request headers
+ rp.setStage(org.apache.coyote.Constants.STAGE_PREPARE);
+ try {
+ prepareRequest();
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.debug(sm.getString(""ajpprocessor.request.prepare""), t);
+ // 400 - Bad Request
+ response.setStatus(400);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+ }
+
+ // Process the request in the adapter
+ if (!error) {
+ try {
+ rp.setStage(org.apache.coyote.Constants.STAGE_SERVICE);
+ adapter.service(request, response);
+ } catch (InterruptedIOException e) {
+ error = true;
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ log.error(sm.getString(""ajpprocessor.request.process""), t);
+ // 500 - Internal Server Error
+ response.setStatus(500);
+ adapter.log(request, response, 0);
+ error = true;
+ }
+ }
+
+ if (isAsync() && !error) {
+ break;
+ }
+
+ // Finish the response if not done yet
+ if (!finished) {
+ try {
+ finish();
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ error = true;
+ }
+ }
+
+ // If there was an error, make sure the request is counted as
+ // an error, and update the statistics counter
+ if (error) {
+ response.setStatus(500);
+ }
+ request.updateCounters();
+
+ rp.setStage(org.apache.coyote.Constants.STAGE_KEEPALIVE);
+ recycle(false);
+ }
+
+ rp.setStage(org.apache.coyote.Constants.STAGE_ENDED);
+
+ if (error || endpoint.isPaused()) {
+ return SocketState.CLOSED;
+ } else if (isAsync()) {
+ return SocketState.LONG;
+ } else {
+ return SocketState.OPEN;
+ }
+ }
+
+
+ // ----------------------------------------------------- ActionHook Methods
+
+
+ /**
+ * Send an action to the connector.
+ *
+ * @param actionCode Type of the action
+ * @param param Action parameter
+ */
+ @Override
+"
+1," public void testRedirectActionPrefixWithEmptyExtension() throws Exception {
+ Map parameterMap = new HashMap();
+ parameterMap.put(DefaultActionMapper.REDIRECT_ACTION_PREFIX + ""myAction"", """");
+
+ StrutsMockHttpServletRequest request = new StrutsMockHttpServletRequest();
+ request.setupGetServletPath(""/someServletPath"");
+ request.setParameterMap(parameterMap);
+
+ DefaultActionMapper defaultActionMapper = new DefaultActionMapper();
+ defaultActionMapper.setContainer(container);
+ defaultActionMapper.setExtensions("",,"");
+ ActionMapping actionMapping = defaultActionMapper.getMapping(request, configManager);
+
+
+ StrutsResultSupport result = (StrutsResultSupport) actionMapping.getResult();
+ assertNotNull(result);
+ assertTrue(result instanceof ServletRedirectResult);
+
+ assertEquals(""myAction"", result.getLocation());
+
+ // TODO: need to test location but there's no access to the property/method, unless we use reflection
+ }
+
+"
+1," public static void assertSettings(Settings left, Settings right, boolean compareClusterName) {
+ ImmutableSet<Map.Entry<String, String>> entries0 = left.getAsMap().entrySet();
+ Map<String, String> entries1 = right.getAsMap();
+ assertThat(entries0.size(), equalTo(entries1.size()));
+ for (Map.Entry<String, String> entry : entries0) {
+ if(entry.getKey().equals(ClusterName.SETTING) && compareClusterName == false) {
+ continue;
+ }
+ assertThat(entries1, hasEntry(entry.getKey(), entry.getValue()));
+ }
+ }
+
+"
+1," protected void service(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
+ log.trace(""Service: {}"", request);
+
+ // is there a consumer registered for the request.
+ HttpConsumer consumer = getServletResolveConsumerStrategy().resolve(request, getConsumers());
+ if (consumer == null) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+
+ if (consumer.getEndpoint().getHttpMethodRestrict() != null) {
+ Iterator<?> it = ObjectHelper.createIterable(consumer.getEndpoint().getHttpMethodRestrict()).iterator();
+ boolean match = false;
+ while (it.hasNext()) {
+ String method = it.next().toString();
+ if (method.equalsIgnoreCase(request.getMethod())) {
+ match = true;
+ break;
+ }
+ }
+ if (!match) {
+ response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
+ return;
+ }
+ }
+
+ if (""TRACE"".equals(request.getMethod()) && !consumer.isTraceEnabled()) {
+ response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
+ return;
+ }
+
+ final Exchange result = (Exchange) request.getAttribute(EXCHANGE_ATTRIBUTE_NAME);
+ if (result == null) {
+ // no asynchronous result so leverage continuation
+ final Continuation continuation = ContinuationSupport.getContinuation(request);
+ if (continuation.isInitial() && continuationTimeout != null) {
+ // set timeout on initial
+ continuation.setTimeout(continuationTimeout);
+ }
+
+ // are we suspended and a request is dispatched initially?
+ if (consumer.isSuspended() && continuation.isInitial()) {
+ response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
+ return;
+ }
+
+ if (continuation.isExpired()) {
+ String id = (String) continuation.getAttribute(EXCHANGE_ATTRIBUTE_ID);
+ // remember this id as expired
+ expiredExchanges.put(id, id);
+ log.warn(""Continuation expired of exchangeId: {}"", id);
+ response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
+ return;
+ }
+
+ // a new request so create an exchange
+ final Exchange exchange = new DefaultExchange(consumer.getEndpoint(), ExchangePattern.InOut);
+
+ if (consumer.getEndpoint().isBridgeEndpoint()) {
+ exchange.setProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.TRUE);
+ exchange.setProperty(Exchange.SKIP_WWW_FORM_URLENCODED, Boolean.TRUE);
+ }
+ if (consumer.getEndpoint().isDisableStreamCache()) {
+ exchange.setProperty(Exchange.DISABLE_HTTP_STREAM_CACHE, Boolean.TRUE);
+ }
+
+ HttpHelper.setCharsetFromContentType(request.getContentType(), exchange);
+
+ exchange.setIn(new HttpMessage(exchange, request, response));
+ // set context path as header
+ String contextPath = consumer.getEndpoint().getPath();
+ exchange.getIn().setHeader(""CamelServletContextPath"", contextPath);
+
+ String httpPath = (String)exchange.getIn().getHeader(Exchange.HTTP_PATH);
+ // here we just remove the CamelServletContextPath part from the HTTP_PATH
+ if (contextPath != null
+ && httpPath.startsWith(contextPath)) {
+ exchange.getIn().setHeader(Exchange.HTTP_PATH,
+ httpPath.substring(contextPath.length()));
+ }
+
+ if (log.isTraceEnabled()) {
+ log.trace(""Suspending continuation of exchangeId: {}"", exchange.getExchangeId());
+ }
+ continuation.setAttribute(EXCHANGE_ATTRIBUTE_ID, exchange.getExchangeId());
+
+ // we want to handle the UoW
+ try {
+ consumer.createUoW(exchange);
+ } catch (Exception e) {
+ log.error(""Error processing request"", e);
+ throw new ServletException(e);
+ }
+
+ // must suspend before we process the exchange
+ continuation.suspend();
+
+ ClassLoader oldTccl = overrideTccl(exchange);
+
+ if (log.isTraceEnabled()) {
+ log.trace(""Processing request for exchangeId: {}"", exchange.getExchangeId());
+ }
+ // use the asynchronous API to process the exchange
+
+ consumer.getAsyncProcessor().process(exchange, new AsyncCallback() {
+ public void done(boolean doneSync) {
+ // check if the exchange id is already expired
+ boolean expired = expiredExchanges.remove(exchange.getExchangeId()) != null;
+ if (!expired) {
+ if (log.isTraceEnabled()) {
+ log.trace(""Resuming continuation of exchangeId: {}"", exchange.getExchangeId());
+ }
+ // resume processing after both sync and async callbacks
+ continuation.setAttribute(EXCHANGE_ATTRIBUTE_NAME, exchange);
+ continuation.resume();
+ } else {
+ log.warn(""Cannot resume expired continuation of exchangeId: {}"", exchange.getExchangeId());
+ }
+ }
+ });
+
+ if (oldTccl != null) {
+ restoreTccl(exchange, oldTccl);
+ }
+
+ // return to let the Jetty continuation work, as it will resubmit and invoke the service
+ // method again when it is resumed
+ return;
+ }
+
+ try {
+ // now lets output to the response
+ if (log.isTraceEnabled()) {
+ log.trace(""Resumed continuation and writing response for exchangeId: {}"", result.getExchangeId());
+ }
+ Integer bs = consumer.getEndpoint().getResponseBufferSize();
+ if (bs != null) {
+ log.trace(""Using response buffer size: {}"", bs);
+ response.setBufferSize(bs);
+ }
+ consumer.getBinding().writeResponse(result, response);
+ } catch (IOException e) {
+ log.error(""Error processing request"", e);
+ throw e;
+ } catch (Exception e) {
+ log.error(""Error processing request"", e);
+ throw new ServletException(e);
+ } finally {
+ consumer.doneUoW(result);
+ }
+ }
+
+"
+1," protected void submit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, FormException {
+ super.submit(req,rsp);
+ JSONObject json = req.getSubmittedForm();
+
+ makeDisabled(req.getParameter(""disable"")!=null);
+
+ jdk = req.getParameter(""jdk"");
+ if(req.getParameter(""hasCustomQuietPeriod"")!=null) {
+ quietPeriod = Integer.parseInt(req.getParameter(""quiet_period""));
+ } else {
+ quietPeriod = null;
+ }
+ if(req.getParameter(""hasCustomScmCheckoutRetryCount"")!=null) {
+ scmCheckoutRetryCount = Integer.parseInt(req.getParameter(""scmCheckoutRetryCount""));
+ } else {
+ scmCheckoutRetryCount = null;
+ }
+ blockBuildWhenDownstreamBuilding = req.getParameter(""blockBuildWhenDownstreamBuilding"")!=null;
+ blockBuildWhenUpstreamBuilding = req.getParameter(""blockBuildWhenUpstreamBuilding"")!=null;
+
+ if(req.hasParameter(""customWorkspace"")) {
+ customWorkspace = Util.fixEmptyAndTrim(req.getParameter(""customWorkspace.directory""));
+ } else {
+ customWorkspace = null;
+ }
+
+ if (json.has(""scmCheckoutStrategy""))
+ scmCheckoutStrategy = req.bindJSON(SCMCheckoutStrategy.class,
+ json.getJSONObject(""scmCheckoutStrategy""));
+ else
+ scmCheckoutStrategy = null;
+
+
+ if(req.getParameter(""hasSlaveAffinity"")!=null) {
+ assignedNode = Util.fixEmptyAndTrim(req.getParameter(""_.assignedLabelString""));
+ } else {
+ assignedNode = null;
+ }
+ canRoam = assignedNode==null;
+
+ concurrentBuild = req.getSubmittedForm().has(""concurrentBuild"");
+
+ authToken = BuildAuthorizationToken.create(req);
+
+ setScm(SCMS.parseSCM(req,this));
+
+ for (Trigger t : triggers())
+ t.stop();
+ triggers = buildDescribable(req, Trigger.for_(this));
+ for (Trigger t : triggers)
+ t.start(this,true);
+
+ for (Publisher _t : Descriptor.newInstancesFromHeteroList(req, json, ""publisher"", Jenkins.getInstance().getExtensionList(BuildTrigger.DescriptorImpl.class))) {
+ BuildTrigger t = (BuildTrigger) _t;
+ for (AbstractProject downstream : t.getChildProjects(this)) {
+ downstream.checkPermission(BUILD);
+ }
+ }
+ }
+
+ /**
+ * @deprecated
+ * As of 1.261. Use {@link #buildDescribable(StaplerRequest, List)} instead.
+ */
+"
+1," private JMXConnectorServer createServer(String serverName,
+ String bindAddress, int theRmiRegistryPort, int theRmiServerPort,
+ HashMap<String,Object> theEnv,
+ RMIClientSocketFactory registryCsf, RMIServerSocketFactory registrySsf,
+ RMIClientSocketFactory serverCsf, RMIServerSocketFactory serverSsf) {
+
+ // Create the RMI registry
+ Registry registry;
+ try {
+ registry = LocateRegistry.createRegistry(
+ theRmiRegistryPort, registryCsf, registrySsf);
+ } catch (RemoteException e) {
+ log.error(sm.getString(
+ ""jmxRemoteLifecycleListener.createRegistryFailed"",
+ serverName, Integer.toString(theRmiRegistryPort)), e);
+ return null;
+ }
+
+ if (bindAddress == null) {
+ bindAddress = ""localhost"";
+ }
+
+ String url = ""service:jmx:rmi://"" + bindAddress;
+ JMXServiceURL serviceUrl;
+ try {
+ serviceUrl = new JMXServiceURL(url);
+ } catch (MalformedURLException e) {
+ log.error(sm.getString(""jmxRemoteLifecycleListener.invalidURL"", serverName, url), e);
+ return null;
+ }
+
+ RMIConnectorServer cs = null;
+ try {
+ RMIJRMPServerImpl server = new RMIJRMPServerImpl(
+ rmiServerPortPlatform, serverCsf, serverSsf, theEnv);
+ cs = new RMIConnectorServer(serviceUrl, theEnv, server,
+ ManagementFactory.getPlatformMBeanServer());
+ cs.start();
+ registry.bind(""jmxrmi"", server);
+ log.info(sm.getString(""jmxRemoteLifecycleListener.start"",
+ Integer.toString(theRmiRegistryPort),
+ Integer.toString(theRmiServerPort), serverName));
+ } catch (IOException | AlreadyBoundException e) {
+ log.error(sm.getString(
+ ""jmxRemoteLifecycleListener.createServerFailed"",
+ serverName), e);
+ }
+ return cs;
+ }
+
+
+"
+1," private void decodeTest()
+ {
+ EllipticCurve curve = new EllipticCurve(
+ new ECFieldFp(new BigInteger(""6277101735386680763835789423207666416083908700390324961279"")), // q
+ new BigInteger(""fffffffffffffffffffffffffffffffefffffffffffffffc"", 16), // a
+ new BigInteger(""64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1"", 16)); // b
+
+ ECPoint p = ECPointUtil.decodePoint(curve, Hex.decode(""03188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012""));
+
+ if (!p.getAffineX().equals(new BigInteger(""188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012"", 16)))
+ {
+ fail(""x uncompressed incorrectly"");
+ }
+
+ if (!p.getAffineY().equals(new BigInteger(""7192b95ffc8da78631011ed6b24cdd573f977a11e794811"", 16)))
+ {
+ fail(""y uncompressed incorrectly"");
+ }
+ }
+
+ /**
+ * X9.62 - 1998,
+ * J.3.2, Page 155, ECDSA over the field Fp
+ * an example with 239 bit prime
+ */
+"
+1," private CipherText convertToCipherText(byte[] cipherTextSerializedBytes)
+ throws EncryptionException
+ {
+ try {
+ assert cipherTextSerializedBytes != null : ""cipherTextSerializedBytes cannot be null."";
+ assert cipherTextSerializedBytes.length > 0 : ""cipherTextSerializedBytes must be > 0 in length."";
+ ByteArrayInputStream bais = new ByteArrayInputStream(cipherTextSerializedBytes);
+ int kdfInfo = readInt(bais);
+ debug(""kdfInfo: "" + kdfInfo);
+ int kdfPrf = (kdfInfo >>> 28);
+ debug(""kdfPrf: "" + kdfPrf);
+ assert kdfPrf >= 0 && kdfPrf <= 15 : ""kdfPrf == "" + kdfPrf + "" must be between 0 and 15."";
+ int kdfVers = ( kdfInfo & 0x07ffffff);
+ assert kdfVers > 0 && kdfVers <= 99991231 : ""KDF Version ("" + kdfVers + "") out of range.""; // Really should be >= 20110203 (earliest).
+ debug(""convertToCipherText: kdfPrf = "" + kdfPrf + "", kdfVers = "" + kdfVers);
+ if ( kdfVers != CipherText.cipherTextVersion ) {
+ // NOTE: In future, support backward compatibility via this mechanism. When we do this
+ // we will have to compare as longs and watch out for sign extension of kdfInfo
+ // since it may have the sign bit set. Then we will do different things depending
+ // on what KDF version we encounter. However, as for now, since this is
+ // the first ESAPI 2.0 GA version, there is nothing to be backward compatible with.
+ // (We did not promise backward compatibility for earlier release candidates.)
+ // Thus any version mismatch at this point is an error.
+ throw new InvalidClassException(""This serialized byte stream not compatible "" +
+ ""with loaded CipherText class. Version read = "" + kdfInfo +
+ ""; version from loaded CipherText class = "" + CipherText.cipherTextVersion);
+ }
+ long timestamp = readLong(bais);
+ debug(""convertToCipherText: timestamp = "" + new Date(timestamp));
+ short strSize = readShort(bais);
+ debug(""convertToCipherText: length of cipherXform = "" + strSize);
+ String cipherXform = readString(bais, strSize);
+ debug(""convertToCipherText: cipherXform = "" + cipherXform);
+ String[] parts = cipherXform.split(""/"");
+ assert parts.length == 3 : ""Malformed cipher transformation"";
+ String cipherMode = parts[1];
+ if ( ! CryptoHelper.isAllowedCipherMode(cipherMode) ) {
+ String msg = ""Cipher mode "" + cipherMode + "" is not an allowed cipher mode"";
+ throw new EncryptionException(msg, msg);
+ }
+ short keySize = readShort(bais);
+ debug(""convertToCipherText: keySize = "" + keySize);
+ short blockSize = readShort(bais);
+ debug(""convertToCipherText: blockSize = "" + blockSize);
+ short ivLen = readShort(bais);
+ debug(""convertToCipherText: ivLen = "" + ivLen);
+ byte[] iv = null;
+ if ( ivLen > 0 ) {
+ iv = new byte[ivLen];
+ bais.read(iv, 0, iv.length);
+ }
+ int ciphertextLen = readInt(bais);
+ debug(""convertToCipherText: ciphertextLen = "" + ciphertextLen);
+ assert ciphertextLen > 0 : ""convertToCipherText: Invalid cipher text length"";
+ byte[] rawCiphertext = new byte[ciphertextLen];
+ bais.read(rawCiphertext, 0, rawCiphertext.length);
+ short macLen = readShort(bais);
+ debug(""convertToCipherText: macLen = "" + macLen);
+ byte[] mac = null;
+ if ( macLen > 0 ) {
+ mac = new byte[macLen];
+ bais.read(mac, 0, mac.length);
+ }
+
+ CipherSpec cipherSpec = new CipherSpec(cipherXform, keySize);
+ cipherSpec.setBlockSize(blockSize);
+ cipherSpec.setIV(iv);
+ debug(""convertToCipherText: CipherSpec: "" + cipherSpec);
+ CipherText ct = new CipherText(cipherSpec);
+ assert (ivLen > 0 && ct.requiresIV()) :
+ ""convertToCipherText: Mismatch between IV length and cipher mode."";
+ ct.setCiphertext(rawCiphertext);
+ // Set this *AFTER* setting raw ciphertext because setCiphertext()
+ // method also sets encryption time.
+ ct.setEncryptionTimestamp(timestamp);
+ if ( macLen > 0 ) {
+ ct.storeSeparateMAC(mac);
+ }
+ return ct;
+ } catch(EncryptionException ex) {
+ throw new EncryptionException(""Cannot deserialize byte array into CipherText object"",
+ ""Cannot deserialize byte array into CipherText object"",
+ ex);
+ } catch (IOException e) {
+ throw new EncryptionException(""Cannot deserialize byte array into CipherText object"",
+ ""Cannot deserialize byte array into CipherText object"", e);
+ }
+ }
+
+"
+1," public static void writeXml(Node n, OutputStream os) throws TransformerException {
+ TransformerFactory tf = TransformerFactory.newInstance();
+ // identity
+ Transformer t = tf.newTransformer();
+ t.setOutputProperty(OutputKeys.INDENT, ""yes"");
+ t.transform(new DOMSource(n), new StreamResult(os));
+ }
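+
+ // Usage sketch (illustrative): any previously parsed org.w3c.dom.Node can be
+ // pretty-printed to a stream, e.g. writeXml(document, System.out); the identity
+ // Transformer with OutputKeys.INDENT set to ""yes"" simply serializes the node with
+ // indentation added.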
+
+"
+1," void setTransferException(boolean transferException);
+
+ /**
+ * Gets the header filter strategy
+ *
+ * @return the strategy
+ */
+"
+1," public List getAclForPath(String path) {
+ List acls = zkACLProvider.getACLsToAdd(path);
+ return acls;
+ }
+ };
+ }
+
+"
+1," protected void forwardToErrorPage(Request request,
+ HttpServletResponse response, LoginConfig config)
+ throws IOException {
+
+ String errorPage = config.getErrorPage();
+ if (errorPage == null || errorPage.length() == 0) {
+ String msg = sm.getString(""formAuthenticator.noErrorPage"",
+ context.getName());
+ log.warn(msg);
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+ msg);
+ return;
+ }
+
+ RequestDispatcher disp =
+ context.getServletContext().getRequestDispatcher
+ (config.getErrorPage());
+ try {
+ if (context.fireRequestInitEvent(request)) {
+ disp.forward(request.getRequest(), response);
+ context.fireRequestDestroyEvent(request);
+ }
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ String msg = sm.getString(""formAuthenticator.forwardErrorFail"");
+ log.warn(msg, t);
+ request.setAttribute(RequestDispatcher.ERROR_EXCEPTION, t);
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+ msg);
+ }
+ }
+
+
+ /**
+ * Does this request match the saved one (so that it must be the redirect
+ * we signaled after successful authentication)?
+ *
+ * @param request The request to be verified
+ * @return true if the request matches the saved one
+ */
+"
+1," public void testSnapshotMoreThanOnce() throws ExecutionException, InterruptedException, IOException {
+ Client client = client();
+ final File tempDir = newTempDir(LifecycleScope.SUITE).getAbsoluteFile();
+ logger.info(""--> creating repository"");
+ assertAcked(client.admin().cluster().preparePutRepository(""test-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", tempDir)
+ .put(""compress"", randomBoolean())
+ .put(""chunk_size"", randomIntBetween(100, 1000))));
+
+ // only one shard
+ assertAcked(prepareCreate(""test"").setSettings(ImmutableSettings.builder()
+ .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+ ));
+ ensureYellow();
+ logger.info(""--> indexing"");
+
+ final int numDocs = randomIntBetween(10, 100);
+ IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
+ for (int i = 0; i < builders.length; i++) {
+ builders[i] = client().prepareIndex(""test"", ""doc"", Integer.toString(i)).setSource(""foo"", ""bar"" + i);
+ }
+ indexRandom(true, builders);
+ flushAndRefresh();
+ assertNoFailures(client().admin().indices().prepareOptimize(""test"").setFlush(true).setMaxNumSegments(1).get());
+
+ CreateSnapshotResponse createSnapshotResponseFirst = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponseFirst.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponseFirst.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseFirst.getSnapshotInfo().totalShards()));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+ {
+ SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus(""test-repo"").setSnapshots(""test"").get().getSnapshots().get(0);
+ List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
+ for (SnapshotIndexShardStatus status : shards) {
+ assertThat(status.getStats().getProcessedFiles(), greaterThan(1));
+ }
+ }
+ if (frequently()) {
+ logger.info(""--> upgrade"");
+ client().admin().indices().prepareUpdateSettings(""test"").setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""none"")).get();
+ backwardsCluster().allowOnAllNodes(""test"");
+ logClusterState();
+ boolean upgraded;
+ do {
+ logClusterState();
+ CountResponse countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+ upgraded = backwardsCluster().upgradeOneNode();
+ ensureYellow();
+ countResponse = client().prepareCount().get();
+ assertHitCount(countResponse, numDocs);
+ } while (upgraded);
+ client().admin().indices().prepareUpdateSettings(""test"").setSettings(ImmutableSettings.builder().put(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, ""all"")).get();
+ }
+ if (cluster().numDataNodes() > 1 && randomBoolean()) { // only bump the replicas if we have enough nodes
+ logger.info(""--> move from 0 to 1 replica"");
+ client().admin().indices().prepareUpdateSettings(""test"").setSettings(ImmutableSettings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
+ }
+ logger.debug(""---> repo exists: "" + new File(tempDir, ""indices/test/0"").exists() + "" files: "" + Arrays.toString(new File(tempDir, ""indices/test/0"").list())); // it's only one shard!
+ CreateSnapshotResponse createSnapshotResponseSecond = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-1"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseSecond.getSnapshotInfo().totalShards()));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-1"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+ {
+ SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus(""test-repo"").setSnapshots(""test-1"").get().getSnapshots().get(0);
+ List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
+ for (SnapshotIndexShardStatus status : shards) {
+
+ assertThat(status.getStats().getProcessedFiles(), equalTo(1)); // we flush before the snapshot such that we have to process the segments_N files
+ }
+ }
+
+ client().prepareDelete(""test"", ""doc"", ""1"").get();
+ CreateSnapshotResponse createSnapshotResponseThird = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-2"").setWaitForCompletion(true).setIndices(""test"").get();
+ assertThat(createSnapshotResponseThird.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponseThird.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseThird.getSnapshotInfo().totalShards()));
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-2"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+ {
+ SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus(""test-repo"").setSnapshots(""test-2"").get().getSnapshots().get(0);
+ List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
+ for (SnapshotIndexShardStatus status : shards) {
+ assertThat(status.getStats().getProcessedFiles(), equalTo(2)); // we flush before the snapshot such that we have to process the segments_N files plus the .del file
+ }
+ }
+ }
+"
+1," private boolean evaluate(String text) {
+ try {
+ InputSource inputSource = new InputSource(new StringReader(text));
+ return ((Boolean)expression.evaluate(inputSource, XPathConstants.BOOLEAN)).booleanValue();
+ } catch (XPathExpressionException e) {
+ return false;
+ }
+ }
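+
+ // Illustrative only: the compiled ""expression"" field used above would typically be built
+ // once up front, roughly like:
+ //
+ //     XPath xpath = XPathFactory.newInstance().newXPath();
+ //     XPathExpression expression = xpath.compile(""/order[@id='123']"");   // hypothetical XPath
+ //
+ // evaluate(...) then returns that expression's boolean result against the supplied XML
+ // text and treats any XPathExpressionException as a non-match.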
+
+ @Override
+"
+1," public void testTrailingHeadersSizeLimit() throws Exception {
+ // Setup Tomcat instance
+ Tomcat tomcat = getTomcatInstance();
+
+ // Must have a real docBase - just use temp
+ Context ctx =
+ tomcat.addContext("""", System.getProperty(""java.io.tmpdir""));
+
+ Tomcat.addServlet(ctx, ""servlet"", new EchoHeaderServlet());
+ ctx.addServletMapping(""/"", ""servlet"");
+
+ // Limit the size of the trailing header
+ tomcat.getConnector().setProperty(""maxTrailerSize"", ""10"");
+ tomcat.start();
+
+ String[] request = new String[]{
+ ""POST /echo-params.jsp HTTP/1.1"" + SimpleHttpClient.CRLF +
+ ""Host: any"" + SimpleHttpClient.CRLF +
+ ""Transfer-encoding: chunked"" + SimpleHttpClient.CRLF +
+ ""Content-Type: application/x-www-form-urlencoded"" +
+ SimpleHttpClient.CRLF +
+ ""Connection: close"" + SimpleHttpClient.CRLF +
+ SimpleHttpClient.CRLF +
+ ""3"" + SimpleHttpClient.CRLF +
+ ""a=0"" + SimpleHttpClient.CRLF +
+ ""4"" + SimpleHttpClient.CRLF +
+ ""&b=1"" + SimpleHttpClient.CRLF +
+ ""0"" + SimpleHttpClient.CRLF +
+ ""x-trailer: Test"" + SimpleHttpClient.CRLF +
+ SimpleHttpClient.CRLF };
+
+ TrailerClient client =
+ new TrailerClient(tomcat.getConnector().getLocalPort());
+ client.setRequest(request);
+
+ client.connect();
+ client.processRequest();
+ // Expected to fail because the trailers are longer
+ // than the set limit of 10 bytes
+ assertTrue(client.isResponse500());
+ }
+
+ @Test
+"
+1," public void testIsExpressionIsFalse() throws Exception {
+ // given
+ String anExpression = ""foo"";
+
+ // when
+ boolean actual = ComponentUtils.isExpression(anExpression);
+
+ // then
+ assertFalse(actual);
+ }
+}
+
+class MockConfigurationProvider implements ConfigurationProvider {
+
+ public void destroy() {
+ }
+
+ public void init(Configuration configuration) throws ConfigurationException {
+ }
+
+ public boolean needsReload() {
+ return false;
+ }
+
+ public void loadPackages() throws ConfigurationException {
+ }
+
+ public void register(ContainerBuilder builder, LocatableProperties props) throws ConfigurationException {
+ builder.constant(StrutsConstants.STRUTS_TAG_ALTSYNTAX, ""false"");
+ }
+"
+1," @Test(timeout = 1000L) public void testLongMonths() throws Exception {
+ Calendar cal = Calendar.getInstance();
+ cal.set(Calendar.MONTH, Calendar.JULY);
+ new CronTab(""0 0 31 7 *"").floor(cal); // would infinite loop
+ }
+
+"
+1," public static File unzip(File zip, File toDir, Predicate filter) throws IOException {
+ if (!toDir.exists()) {
+ FileUtils.forceMkdir(toDir);
+ }
+
+ ZipFile zipFile = new ZipFile(zip);
+ try {
+ Enumeration<? extends ZipEntry> entries = zipFile.entries();
+ while (entries.hasMoreElements()) {
+ ZipEntry entry = entries.nextElement();
+ if (filter.test(entry)) {
+ File to = new File(toDir, entry.getName());
+ if (entry.isDirectory()) {
+ throwExceptionIfDirectoryIsNotCreatable(to);
+ } else {
+ File parent = to.getParentFile();
+ throwExceptionIfDirectoryIsNotCreatable(parent);
+ copy(zipFile, entry, to);
+ }
+ }
+ }
+ return toDir;
+
+ } finally {
+ zipFile.close();
+ }
+ }
+
+"
+1," public Authentication authenticate(Authentication req) throws AuthenticationException {
+ logger.debug(""Processing authentication request for "" + req.getName());
+
+ if (req.getCredentials() == null) {
+ BadCredentialsException e = new BadCredentialsException(""No password supplied"");
+ publish(new AuthenticationFailureBadCredentialsEvent(req, e));
+ throw e;
+ }
+
+ UaaUser user;
+ boolean passwordMatches = false;
+ user = getUaaUser(req);
+ if (user!=null) {
+ passwordMatches =
+ ((CharSequence) req.getCredentials()).length() != 0 && encoder.matches((CharSequence) req.getCredentials(), user.getPassword());
+ } else {
+ user = dummyUser;
+ }
+
+ if (!accountLoginPolicy.isAllowed(user, req)) {
+ logger.warn(""Login policy rejected authentication for "" + user.getUsername() + "", "" + user.getId()
+ + "". Ignoring login request."");
+ AuthenticationPolicyRejectionException e = new AuthenticationPolicyRejectionException(""Login policy rejected authentication"");
+ publish(new AuthenticationFailureLockedEvent(req, e));
+ throw e;
+ }
+
+ if (passwordMatches) {
+ logger.debug(""Password successfully matched for userId[""+user.getUsername()+""]:""+user.getId());
+
+ if (!allowUnverifiedUsers && !user.isVerified()) {
+ publish(new UnverifiedUserAuthenticationEvent(user, req));
+ logger.debug(""Account not verified: "" + user.getId());
+ throw new AccountNotVerifiedException(""Account not verified"");
+ }
+
+ int expiringPassword = getPasswordExpiresInMonths();
+ if (expiringPassword>0) {
+ Calendar cal = Calendar.getInstance();
+ cal.setTimeInMillis(user.getPasswordLastModified().getTime());
+ cal.add(Calendar.MONTH, expiringPassword);
+ if (cal.getTimeInMillis() < System.currentTimeMillis()) {
+ throw new PasswordExpiredException(""Your current password has expired. Please reset your password."");
+ }
+ }
+
+ Authentication success = new UaaAuthentication(new UaaPrincipal(user),
+ user.getAuthorities(), (UaaAuthenticationDetails) req.getDetails());
+ publish(new UserAuthenticationSuccessEvent(user, success));
+
+ return success;
+ }
+
+ if (user == dummyUser || user == null) {
+ logger.debug(""No user named '"" + req.getName() + ""' was found for origin:""+ origin);
+ publish(new UserNotFoundEvent(req));
+ } else {
+ logger.debug(""Password did not match for user "" + req.getName());
+ publish(new UserAuthenticationFailureEvent(user, req));
+ }
+ BadCredentialsException e = new BadCredentialsException(""Bad credentials"");
+ publish(new AuthenticationFailureBadCredentialsEvent(req, e));
+ throw e;
+ }
+
+"
+1," public void setUp() throws Exception {
+ lc = new LoggerContext();
+ lc.setName(""testContext"");
+ logger = lc.getLogger(LoggerSerializationTest.class);
+ // create the byte output stream
+ bos = new ByteArrayOutputStream();
+ oos = new ObjectOutputStream(bos);
+ }
+
+ @After
+"
+1," public void parseEmbedded(
+ InputStream stream, ContentHandler handler, Metadata metadata, boolean outputHtml)
+ throws SAXException, IOException {
+ if(outputHtml) {
+ AttributesImpl attributes = new AttributesImpl();
+ attributes.addAttribute("""", ""class"", ""class"", ""CDATA"", ""package-entry"");
+ handler.startElement(XHTML, ""div"", ""div"", attributes);
+ }
+
+ String name = metadata.get(Metadata.RESOURCE_NAME_KEY);
+ if (name != null && name.length() > 0 && outputHtml) {
+ handler.startElement(XHTML, ""h1"", ""h1"", new AttributesImpl());
+ char[] chars = name.toCharArray();
+ handler.characters(chars, 0, chars.length);
+ handler.endElement(XHTML, ""h1"", ""h1"");
+ }
+
+ // Use the delegate parser to parse this entry
+ try (TemporaryResources tmp = new TemporaryResources()) {
+ final TikaInputStream newStream = TikaInputStream.get(new CloseShieldInputStream(stream), tmp);
+ if (stream instanceof TikaInputStream) {
+ final Object container = ((TikaInputStream) stream).getOpenContainer();
+ if (container != null) {
+ newStream.setOpenContainer(container);
+ }
+ }
+ DELEGATING_PARSER.parse(
+ newStream,
+ new EmbeddedContentHandler(new BodyContentHandler(handler)),
+ metadata, context);
+ } catch (EncryptedDocumentException ede) {
+ // TODO: can we log a warning that we lack the password?
+ // For now, just skip the content
+ } catch (TikaException e) {
+ // TODO: can we log a warning somehow?
+ // Could not parse the entry, just skip the content
+ }
+
+ if(outputHtml) {
+ handler.endElement(XHTML, ""div"", ""div"");
+ }
+ }
+
+"
+1," private final void internalMapWrapper(ContextVersion contextVersion,
+ CharChunk path,
+ MappingData mappingData) throws IOException {
+
+ int pathOffset = path.getOffset();
+ int pathEnd = path.getEnd();
+ int servletPath = pathOffset;
+ boolean noServletPath = false;
+
+ int length = contextVersion.path.length();
+ if (length != (pathEnd - pathOffset)) {
+ servletPath = pathOffset + length;
+ } else {
+ noServletPath = true;
+ path.append('/');
+ pathOffset = path.getOffset();
+ pathEnd = path.getEnd();
+ servletPath = pathOffset+length;
+ }
+
+ path.setOffset(servletPath);
+
+ // Rule 1 -- Exact Match
+ MappedWrapper[] exactWrappers = contextVersion.exactWrappers;
+ internalMapExactWrapper(exactWrappers, path, mappingData);
+
+ // Rule 2 -- Prefix Match
+ boolean checkJspWelcomeFiles = false;
+ MappedWrapper[] wildcardWrappers = contextVersion.wildcardWrappers;
+ if (mappingData.wrapper == null) {
+ internalMapWildcardWrapper(wildcardWrappers, contextVersion.nesting,
+ path, mappingData);
+ if (mappingData.wrapper != null && mappingData.jspWildCard) {
+ char[] buf = path.getBuffer();
+ if (buf[pathEnd - 1] == '/') {
+ /*
+ * Path ending in '/' was mapped to JSP servlet based on
+ * wildcard match (e.g., as specified in url-pattern of a
+ * jsp-property-group).
+ * Force the context's welcome files, which are interpreted
+ * as JSP files (since they match the url-pattern), to be
+ * considered. See Bugzilla 27664.
+ */
+ mappingData.wrapper = null;
+ checkJspWelcomeFiles = true;
+ } else {
+ // See Bugzilla 27704
+ mappingData.wrapperPath.setChars(buf, path.getStart(),
+ path.getLength());
+ mappingData.pathInfo.recycle();
+ }
+ }
+ }
+
+ if(mappingData.wrapper == null && noServletPath) {
+ // The path is empty, redirect to ""/""
+ mappingData.redirectPath.setChars
+ (path.getBuffer(), pathOffset, pathEnd-pathOffset);
+ path.setEnd(pathEnd - 1);
+ return;
+ }
+
+ // Rule 3 -- Extension Match
+ MappedWrapper[] extensionWrappers = contextVersion.extensionWrappers;
+ if (mappingData.wrapper == null && !checkJspWelcomeFiles) {
+ internalMapExtensionWrapper(extensionWrappers, path, mappingData,
+ true);
+ }
+
+ // Rule 4 -- Welcome resources processing for servlets
+ if (mappingData.wrapper == null) {
+ boolean checkWelcomeFiles = checkJspWelcomeFiles;
+ if (!checkWelcomeFiles) {
+ char[] buf = path.getBuffer();
+ checkWelcomeFiles = (buf[pathEnd - 1] == '/');
+ }
+ if (checkWelcomeFiles) {
+ for (int i = 0; (i < contextVersion.welcomeResources.length)
+ && (mappingData.wrapper == null); i++) {
+ path.setOffset(pathOffset);
+ path.setEnd(pathEnd);
+ path.append(contextVersion.welcomeResources[i], 0,
+ contextVersion.welcomeResources[i].length());
+ path.setOffset(servletPath);
+
+ // Rule 4a -- Welcome resources processing for exact match
+ internalMapExactWrapper(exactWrappers, path, mappingData);
+
+ // Rule 4b -- Welcome resources processing for prefix match
+ if (mappingData.wrapper == null) {
+ internalMapWildcardWrapper
+ (wildcardWrappers, contextVersion.nesting,
+ path, mappingData);
+ }
+
+ // Rule 4c -- Welcome resources processing
+ // for physical folder
+ if (mappingData.wrapper == null
+ && contextVersion.resources != null) {
+ String pathStr = path.toString();
+ WebResource file =
+ contextVersion.resources.getResource(pathStr);
+ if (file != null && file.isFile()) {
+ internalMapExtensionWrapper(extensionWrappers, path,
+ mappingData, true);
+ if (mappingData.wrapper == null
+ && contextVersion.defaultWrapper != null) {
+ mappingData.wrapper =
+ contextVersion.defaultWrapper.object;
+ mappingData.requestPath.setChars
+ (path.getBuffer(), path.getStart(),
+ path.getLength());
+ mappingData.wrapperPath.setChars
+ (path.getBuffer(), path.getStart(),
+ path.getLength());
+ mappingData.requestPath.setString(pathStr);
+ mappingData.wrapperPath.setString(pathStr);
+ }
+ }
+ }
+ }
+
+ path.setOffset(servletPath);
+ path.setEnd(pathEnd);
+ }
+
+ }
+
+ /* welcome file processing - take 2
+ * Now that we have looked for welcome files with a physical
+ * backing, look for an extension mapping that is listed
+ * but may not have a physical backing. This is for
+ * the case of index.jsf, index.do, etc.
+ * A watered-down version of rule 4.
+ */
+ if (mappingData.wrapper == null) {
+ boolean checkWelcomeFiles = checkJspWelcomeFiles;
+ if (!checkWelcomeFiles) {
+ char[] buf = path.getBuffer();
+ checkWelcomeFiles = (buf[pathEnd - 1] == '/');
+ }
+ if (checkWelcomeFiles) {
+ for (int i = 0; (i < contextVersion.welcomeResources.length)
+ && (mappingData.wrapper == null); i++) {
+ path.setOffset(pathOffset);
+ path.setEnd(pathEnd);
+ path.append(contextVersion.welcomeResources[i], 0,
+ contextVersion.welcomeResources[i].length());
+ path.setOffset(servletPath);
+ internalMapExtensionWrapper(extensionWrappers, path,
+ mappingData, false);
+ }
+
+ path.setOffset(servletPath);
+ path.setEnd(pathEnd);
+ }
+ }
+
+
+ // Rule 7 -- Default servlet
+ if (mappingData.wrapper == null && !checkJspWelcomeFiles) {
+ if (contextVersion.defaultWrapper != null) {
+ mappingData.wrapper = contextVersion.defaultWrapper.object;
+ mappingData.requestPath.setChars
+ (path.getBuffer(), path.getStart(), path.getLength());
+ mappingData.wrapperPath.setChars
+ (path.getBuffer(), path.getStart(), path.getLength());
+ }
+ // Redirection to a folder
+ char[] buf = path.getBuffer();
+ if (contextVersion.resources != null && buf[pathEnd -1 ] != '/') {
+ String pathStr = path.toString();
+ WebResource file =
+ contextVersion.resources.getResource(pathStr);
+ if (file != null && file.isDirectory()) {
+ // Note: this mutates the path: do not do any processing
+ // after this (since we set the redirectPath, there
+ // shouldn't be any)
+ path.setOffset(pathOffset);
+ path.append('/');
+ mappingData.redirectPath.setChars
+ (path.getBuffer(), path.getStart(), path.getLength());
+ } else {
+ mappingData.requestPath.setString(pathStr);
+ mappingData.wrapperPath.setString(pathStr);
+ }
+ }
+ }
+
+ path.setOffset(pathOffset);
+ path.setEnd(pathEnd);
+
+ }
+
+
+ /**
+ * Exact mapping.
+ */
+"
+1," protected String buildErrorMessage(Throwable e, Object[] args) {
+ String errorKey = ""struts.messages.upload.error."" + e.getClass().getSimpleName();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(""Preparing error message for key: [#0]"", errorKey);
+ }
+ return LocalizedTextUtil.findText(this.getClass(), errorKey, defaultLocale, e.getMessage(), args);
+ }
+
+ /**
+ * Get an enumeration of the parameter names for uploaded files
+ *
+ * @return enumeration of parameter names for uploaded files
+ */
+"
+1," public void testMultiValued() throws Exception {
+ Map entityAttrs = createMap(""name"", ""e"", ""url"", ""testdata.xml"",
+ XPathEntityProcessor.FOR_EACH, ""/root"");
+ List fields = new ArrayList();
+ fields.add(createMap(""column"", ""a"", ""xpath"", ""/root/a"", DataImporter.MULTI_VALUED, ""true""));
+ Context c = getContext(null,
+ new VariableResolverImpl(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs);
+ XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
+ xPathEntityProcessor.init(c);
+ List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+ while (true) {
+ Map row = xPathEntityProcessor.nextRow();
+ if (row == null)
+ break;
+ result.add(row);
+ }
+ assertEquals(2, ((List)result.get(0).get(""a"")).size());
+ }
+
+ @Test
+"
+1," public String getInfo() {
+
+ return (info);
+
+ }
+
+
+ // --------------------------------------------------------- Public Methods
+
+
+ /**
+ * Authenticate the user making this request, based on the specified
+ * login configuration. Return true if any specified
+ * constraint has been satisfied, or false if we have
+ * created a response challenge already.
+ *
+ * @param request Request we are processing
+ * @param response Response we are creating
+ * @param config Login configuration describing how authentication
+ * should be performed
+ *
+ * @exception IOException if an input/output error occurs
+ */
+ @Override
+"
+1," public final void invoke(Request request, Response response)
+ throws IOException, ServletException {
+
+ // Select the Context to be used for this Request
+ Context context = request.getContext();
+ if (context == null) {
+ response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+ sm.getString(""standardHost.noContext""));
+ return;
+ }
+
+ if (request.isAsyncSupported()) {
+ request.setAsyncSupported(context.getPipeline().isAsyncSupported());
+ }
+
+ boolean asyncAtStart = request.isAsync();
+ boolean asyncDispatching = request.isAsyncDispatching();
+
+ try {
+ context.bind(Globals.IS_SECURITY_ENABLED, MY_CLASSLOADER);
+
+ if (!asyncAtStart && !context.fireRequestInitEvent(request)) {
+ // Don't fire listeners during async processing (the listener
+ // fired for the request that called startAsync()).
+ // If a request init listener throws an exception, the request
+ // is aborted.
+ return;
+ }
+
+ // Ask this Context to process this request. Requests that are in
+ // async mode and are not being dispatched to this resource must be
+ // in error and have been routed here to check for application
+ // defined error pages.
+ try {
+ if (!asyncAtStart || asyncDispatching) {
+ context.getPipeline().getFirst().invoke(request, response);
+ } else {
+ // Make sure this request/response is here because an error
+ // report is required.
+ if (!response.isErrorReportRequired()) {
+ throw new IllegalStateException(sm.getString(""standardHost.asyncStateError""));
+ }
+ }
+ } catch (Throwable t) {
+ ExceptionUtils.handleThrowable(t);
+ container.getLogger().error(""Exception Processing "" + request.getRequestURI(), t);
+ // If a new error occurred while trying to report a previous
+ // error allow the original error to be reported.
+ if (!response.isErrorReportRequired()) {
+ request.setAttribute(RequestDispatcher.ERROR_EXCEPTION, t);
+ throwable(request, response, t);
+ }
+ }
+
+ // Now that the request/response pair is back under container
+ // control lift the suspension so that the error handling can
+ // complete and/or the container can flush any remaining data
+ response.setSuspended(false);
+
+ Throwable t = (Throwable) request.getAttribute(RequestDispatcher.ERROR_EXCEPTION);
+
+ // Protect against NPEs if the context was destroyed during a
+ // long running request.
+ if (!context.getState().isAvailable()) {
+ return;
+ }
+
+ // Look for (and render if found) an application level error page
+ if (response.isErrorReportRequired()) {
+ if (t != null) {
+ throwable(request, response, t);
+ } else {
+ status(request, response);
+ }
+ }
+
+ if (!request.isAsync() && !asyncAtStart) {
+ context.fireRequestDestroyEvent(request);
+ }
+ } finally {
+ // Access a session (if present) to update last accessed time, based
+ // on a strict interpretation of the specification
+ if (ACCESS_SESSION) {
+ request.getSession(false);
+ }
+
+ context.unbind(Globals.IS_SECURITY_ENABLED, MY_CLASSLOADER);
+ }
+ }
+
+
+ // -------------------------------------------------------- Private Methods
+
+ /**
+ * Handle the HTTP status code (and corresponding message) generated
+ * while processing the specified Request to produce the specified
+ * Response. Any exceptions that occur during generation of the error
+ * report are logged and swallowed.
+ *
+ * @param request The request being processed
+ * @param response The response being generated
+ */
+"
+1," private static void addHeadersToRequest(URLConnection connection, JSONObject headers) {
+ try {
+ for (Iterator<?> iter = headers.keys(); iter.hasNext(); ) {
+ /* RFC 2616 says that non-ASCII characters and control
+ * characters are not allowed in header names or values.
+ * Additionally, spaces are not allowed in header names.
+ * RFC 2046 Quoted-printable encoding may be used to encode
+ * arbitrary characters, but we do not do that encoding here.
+ */
+ String headerKey = iter.next().toString();
+ headerKey = headerKey.replaceAll(""\\n"","""")
+ .replaceAll(""\\s+"","""")
+ .replaceAll(""[^\\x20-\\x7E]+"", """");
+
+ JSONArray headerValues = headers.optJSONArray(headerKey);
+ if (headerValues == null) {
+ headerValues = new JSONArray();
+
+ /* RFC 2616 also says that any amount of consecutive linear
+ * whitespace within a header value can be replaced with a
+ * single space character, without affecting the meaning of
+ * that value.
+ */
+
+ String headerValue = headers.getString(headerKey);
+ String finalValue = headerValue.replaceAll(""\\s+"", "" "").replaceAll(""\\n"","" "").replaceAll(""[^\\x20-\\x7E]+"", "" "");
+ headerValues.put(finalValue);
+ }
+
+ connection.setRequestProperty(headerKey, headerValues.getString(0));
+ for (int i = 1; i < headerValues.length(); ++i) {
+ connection.addRequestProperty(headerKey, headerValues.getString(i));
+ }
+ }
+ } catch (JSONException e1) {
+ // No headers to be manipulated!
+ }
+ }
+
+"
+1," public HttpSecurity and() {
+ return HttpSecurity.this;
+ }
+
+"
+1," void setExtensionsTable(StylesheetRoot sroot)
+ throws javax.xml.transform.TransformerException
+ {
+ try
+ {
+ if (sroot.getExtensions() != null)
+ m_extensionsTable = new ExtensionsTable(sroot);
+ }
+ catch (javax.xml.transform.TransformerException te)
+ {te.printStackTrace();}
+ }
+
+ //== Implementation of the XPath ExtensionsProvider interface.
+
+"
+1," public void testTrustedMethodPrevention() {
+ Response response = WebClient.create(endPoint + TIKA_PATH)
+ .type(""application/pdf"")
+ .accept(""text/plain"")
+ .header(TikaResource.X_TIKA_OCR_HEADER_PREFIX +
+ ""trustedPageSeparator"",
+ ""\u0010"")
+ .put(ClassLoader.getSystemResourceAsStream(""testOCR.pdf""));
+ assertEquals(500, response.getStatus());
+
+ }
+"
+1," public void performTest()
+ throws Exception
+ {
+ testCompat();
+ testNONEwithDSA();
+
+ testDSAsha3(NISTObjectIdentifiers.id_dsa_with_sha3_224, 224, new BigInteger(""613202af2a7f77e02b11b5c3a5311cf6b412192bc0032aac3ec127faebfc6bd0"", 16));
+ testDSAsha3(NISTObjectIdentifiers.id_dsa_with_sha3_256, 256, new BigInteger(""2450755c5e15a691b121bc833b97864e34a61ee025ecec89289c949c1858091e"", 16));
+ testDSAsha3(NISTObjectIdentifiers.id_dsa_with_sha3_384, 384, new BigInteger(""7aad97c0b71bb1e1a6483b6948a03bbe952e4780b0cee699a11731f90d84ddd1"", 16));
+ testDSAsha3(NISTObjectIdentifiers.id_dsa_with_sha3_512, 512, new BigInteger(""725ad64d923c668e64e7c3898b5efde484cab49ce7f98c2885d2a13a9e355ad4"", 16));
+
+ testECDSA239bitPrime();
+ testNONEwithECDSA239bitPrime();
+ testECDSA239bitBinary();
+ testECDSA239bitBinary(""RIPEMD160withECDSA"", TeleTrusTObjectIdentifiers.ecSignWithRipemd160);
+ testECDSA239bitBinary(""SHA1withECDSA"", TeleTrusTObjectIdentifiers.ecSignWithSha1);
+ testECDSA239bitBinary(""SHA224withECDSA"", X9ObjectIdentifiers.ecdsa_with_SHA224);
+ testECDSA239bitBinary(""SHA256withECDSA"", X9ObjectIdentifiers.ecdsa_with_SHA256);
+ testECDSA239bitBinary(""SHA384withECDSA"", X9ObjectIdentifiers.ecdsa_with_SHA384);
+ testECDSA239bitBinary(""SHA512withECDSA"", X9ObjectIdentifiers.ecdsa_with_SHA512);
+ testECDSA239bitBinary(""SHA1withCVC-ECDSA"", EACObjectIdentifiers.id_TA_ECDSA_SHA_1);
+ testECDSA239bitBinary(""SHA224withCVC-ECDSA"", EACObjectIdentifiers.id_TA_ECDSA_SHA_224);
+ testECDSA239bitBinary(""SHA256withCVC-ECDSA"", EACObjectIdentifiers.id_TA_ECDSA_SHA_256);
+ testECDSA239bitBinary(""SHA384withCVC-ECDSA"", EACObjectIdentifiers.id_TA_ECDSA_SHA_384);
+ testECDSA239bitBinary(""SHA512withCVC-ECDSA"", EACObjectIdentifiers.id_TA_ECDSA_SHA_512);
+
+ testECDSAP256sha3(NISTObjectIdentifiers.id_ecdsa_with_sha3_224, 224, new BigInteger(""84d7d8e68e405064109cd9fc3e3026d74d278aada14ce6b7a9dd0380c154dc94"", 16));
+ testECDSAP256sha3(NISTObjectIdentifiers.id_ecdsa_with_sha3_256, 256, new BigInteger(""99a43bdab4af989aaf2899079375642f2bae2dce05bcd8b72ec8c4a8d9a143f"", 16));
+ testECDSAP256sha3(NISTObjectIdentifiers.id_ecdsa_with_sha3_384, 384, new BigInteger(""aa27726509c37aaf601de6f7e01e11c19add99530c9848381c23365dc505b11a"", 16));
+ testECDSAP256sha3(NISTObjectIdentifiers.id_ecdsa_with_sha3_512, 512, new BigInteger(""f8306b57a1f5068bf12e53aabaae39e2658db39bc56747eaefb479995130ad16"", 16));
+
+ testGeneration();
+ testParameters();
+ testDSA2Parameters();
+ testNullParameters();
+ testValidate();
+ testModified();
+ }
+
+"
+1," public void snapshotRecoveryTest() throws Exception {
+ logger.info(""--> start node A"");
+ String nodeA = internalCluster().startNode(settingsBuilder().put(""gateway.type"", ""local""));
+
+ logger.info(""--> create repository"");
+ assertAcked(client().admin().cluster().preparePutRepository(REPO_NAME)
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", newTempDir(LifecycleScope.SUITE))
+ .put(""compress"", false)
+ ).get());
+
+ ensureGreen();
+
+ logger.info(""--> create index on node: {}"", nodeA);
+ createAndPopulateIndex(INDEX_NAME, 1, SHARD_COUNT, REPLICA_COUNT);
+
+ logger.info(""--> snapshot"");
+ CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot(REPO_NAME, SNAP_NAME)
+ .setWaitForCompletion(true).setIndices(INDEX_NAME).get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
+
+ assertThat(client().admin().cluster().prepareGetSnapshots(REPO_NAME).setSnapshots(SNAP_NAME).get()
+ .getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+
+ client().admin().indices().prepareClose(INDEX_NAME).execute().actionGet();
+
+ logger.info(""--> restore"");
+ RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster()
+ .prepareRestoreSnapshot(REPO_NAME, SNAP_NAME).setWaitForCompletion(true).execute().actionGet();
+ int totalShards = restoreSnapshotResponse.getRestoreInfo().totalShards();
+ assertThat(totalShards, greaterThan(0));
+
+ ensureGreen();
+
+ logger.info(""--> request recoveries"");
+ RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet();
+
+ for (Map.Entry<String, List<ShardRecoveryResponse>> shardRecoveryResponse : response.shardResponses().entrySet()) {
+
+ assertThat(shardRecoveryResponse.getKey(), equalTo(INDEX_NAME));
+ List<ShardRecoveryResponse> shardRecoveryResponses = shardRecoveryResponse.getValue();
+ assertThat(shardRecoveryResponses.size(), equalTo(totalShards));
+
+ for (ShardRecoveryResponse shardResponse : shardRecoveryResponses) {
+ assertRecoveryState(shardResponse.recoveryState(), 0, Type.SNAPSHOT, Stage.DONE, null, nodeA, true);
+ validateIndexRecoveryState(shardResponse.recoveryState().getIndex());
+ }
+ }
+ }
+
+"
+1," public synchronized Principal authenticate(Connection dbConnection,
+ String username,
+ String credentials) {
+ // No user or no credentials
+ // Can't possibly authenticate, don't bother the database then
+ if (username == null || credentials == null) {
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""jdbcRealm.authenticateFailure"",
+ username));
+ return null;
+ }
+
+ // Look up the user's credentials
+ String dbCredentials = getPassword(username);
+
+ if (dbCredentials == null) {
+ // User was not found in the database.
+
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""jdbcRealm.authenticateFailure"",
+ username));
+ return null;
+ }
+
+ // Validate the user's credentials
+ boolean validated = getCredentialHandler().matches(credentials, dbCredentials);
+
+ if (validated) {
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""jdbcRealm.authenticateSuccess"",
+ username));
+ } else {
+ if (containerLog.isTraceEnabled())
+ containerLog.trace(sm.getString(""jdbcRealm.authenticateFailure"",
+ username));
+ return null;
+ }
+
+ ArrayList roles = getRoles(username);
+
+ // Create and return a suitable Principal for this user
+ return (new GenericPrincipal(username, credentials, roles));
+ }
+
+
+ /**
+ * Close the specified database connection.
+ *
+ * @param dbConnection The connection to be closed
+ */
+"
+1," public void performTest()
+ throws Exception
+ {
+ byte[] testIv = { 1, 2, 3, 4, 5, 6, 7, 8 };
+
+ ASN1Encodable[] values = {
+ new CAST5CBCParameters(testIv, 128),
+ new NetscapeCertType(NetscapeCertType.smime),
+ new VerisignCzagExtension(new DERIA5String(""hello"")),
+ new IDEACBCPar(testIv),
+ new NetscapeRevocationURL(new DERIA5String(""http://test""))
+ };
+
+ byte[] data = Base64.decode(""MA4ECAECAwQFBgcIAgIAgAMCBSAWBWhlbGxvMAoECAECAwQFBgcIFgtodHRwOi8vdGVzdA=="");
+
+ ByteArrayOutputStream bOut = new ByteArrayOutputStream();
+ ASN1OutputStream aOut = new ASN1OutputStream(bOut);
+
+ for (int i = 0; i != values.length; i++)
+ {
+ aOut.writeObject(values[i]);
+ }
+
+ ASN1Primitive[] readValues = new ASN1Primitive[values.length];
+
+ if (!isSameAs(bOut.toByteArray(), data))
+ {
+ fail(""Failed data check"");
+ }
+
+ ByteArrayInputStream bIn = new ByteArrayInputStream(bOut.toByteArray());
+ ASN1InputStream aIn = new ASN1InputStream(bIn);
+
+ for (int i = 0; i != values.length; i++)
+ {
+ ASN1Primitive o = aIn.readObject();
+ if (!values[i].equals(o))
+ {
+ fail(""Failed equality test for "" + o);
+ }
+
+ if (o.hashCode() != values[i].hashCode())
+ {
+ fail(""Failed hashCode test for "" + o);
+ }
+ }
+
+ shouldFailOnExtraData();
+ }
+
+"
+1," public void setEncoding( String enc ) {
+ if( !byteC.isNull() ) {
+ // if the encoding changes we need to reset the conversion results
+ charC.recycle();
+ hasStrValue=false;
+ }
+ byteC.setEncoding(enc);
+ }
+
+ /**
+ * Sets the content to be a char[]
+ *
+ * @param c the chars
+ * @param off the start offset of the chars
+ * @param len the length of the chars
+ */
+"
+1," protected String buildErrorMessage(Throwable e, Object[] args) {
+ String errorKey = ""struts.messages.upload.error."" + e.getClass().getSimpleName();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(""Preparing error message for key: [#0]"", errorKey);
+ }
+ return LocalizedTextUtil.findText(this.getClass(), errorKey, defaultLocale, e.getMessage(), args);
+ }
+
+"
+1," private BigInteger[] derDecode(
+ byte[] encoding)
+ throws IOException
+ {
+ ASN1Sequence s = (ASN1Sequence)ASN1Primitive.fromByteArray(encoding);
+ if (s.size() != 2)
+ {
+ throw new IOException(""malformed signature"");
+ }
+
+ return new BigInteger[]{
+ ((ASN1Integer)s.getObjectAt(0)).getValue(),
+ ((ASN1Integer)s.getObjectAt(1)).getValue()
+ };
+ }
+
+"
+1," public String encodeCharacter( char[] immune, Character c )
+ {
+ String cStr = String.valueOf(c.charValue());
+ byte[] bytes;
+ StringBuilder sb;
+
+ if(UNENCODED_SET.contains(c))
+ return cStr;
+
+ bytes = toUtf8Bytes(cStr);
+ sb = new StringBuilder(bytes.length * 3);
+ for(byte b : bytes)
+ appendTwoUpperHex(sb.append('%'), b);
+ return sb.toString();
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * All formats are legal, in both upper and lower case:
+ * %hh;
+ *
+ * @param input
+ * encoded character using percent characters (such as URL encoding)
+ */
+"
+1," protected void setAuthenticateHeader(Request request,
+ Response response,
+ LoginConfig config,
+ String nOnce) {
+
+ // Get the realm name
+ String realmName = config.getRealmName();
+ if (realmName == null)
+ realmName = REALM_NAME;
+
+ byte[] buffer = null;
+ synchronized (md5Helper) {
+ buffer = md5Helper.digest(nOnce.getBytes());
+ }
+
+ String authenticateHeader = ""Digest realm=\"""" + realmName + ""\"", ""
+ + ""qop=\""auth\"", nonce=\"""" + nOnce + ""\"", "" + ""opaque=\""""
+ + md5Encoder.encode(buffer) + ""\"""";
+ response.setHeader(""WWW-Authenticate"", authenticateHeader);
+
+ }
+
+
+"
+1," private Foo writeAndRead(Foo foo) throws IOException, ClassNotFoundException {
+ writeObject(oos, foo);
+ ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
+ inputStream = new ObjectInputStream(bis);
+ Foo fooBack = readFooObject(inputStream);
+ inputStream.close();
+ return fooBack;
+ }
+
+"
+1," private static void processHeaderConfig(MultivaluedMap httpHeaders, Object object, String key, String prefix) {
+
+ try {
+ String property = StringUtils.removeStart(key, prefix);
+ Field field = null;
+ try {
+ field = object.getClass().getDeclaredField(StringUtils.uncapitalize(property));
+ } catch (NoSuchFieldException e) {
+ //swallow
+ }
+ String setter = property;
+ setter = ""set""+setter.substring(0,1).toUpperCase(Locale.US)+setter.substring(1);
+ //default assume string class
+ //if there's a more specific type, e.g. double, int, boolean
+ //try that.
+ Class clazz = String.class;
+ if (field != null) {
+ if (field.getType() == int.class) {
+ clazz = int.class;
+ } else if (field.getType() == double.class) {
+ clazz = double.class;
+ } else if (field.getType() == boolean.class) {
+ clazz = boolean.class;
+ }
+ }
+
+ Method m = tryToGetMethod(object, setter, clazz);
+ //if you couldn't find more specific setter, back off
+ //to string setter and try that.
+ if (m == null && clazz != String.class) {
+ m = tryToGetMethod(object, setter, String.class);
+ }
+
+ if (m != null) {
+ String val = httpHeaders.getFirst(key);
+ val = val.trim();
+ if (clazz == String.class) {
+ checkTrustWorthy(setter, val);
+ m.invoke(object, val);
+ } else if (clazz == int.class) {
+ m.invoke(object, Integer.parseInt(val));
+ } else if (clazz == double.class) {
+ m.invoke(object, Double.parseDouble(val));
+ } else if (clazz == boolean.class) {
+ m.invoke(object, Boolean.parseBoolean(val));
+ } else {
+ throw new IllegalArgumentException(""setter must be String, int, double or boolean...for now"");
+ }
+ } else {
+ throw new NoSuchMethodException(""Couldn't find: ""+setter);
+ }
+
+ } catch (Throwable ex) {
+ throw new WebApplicationException(String.format(Locale.ROOT,
+ ""%s is an invalid %s header"", key, X_TIKA_OCR_HEADER_PREFIX));
+ }
+ }
+
+"
+1," public KeystoreInstance createKeystore(String name, char[] password, String keystoreType) throws KeystoreException {
+ File test = new File(directory, name);
+ if(test.exists()) {
+ throw new IllegalArgumentException(""Keystore already exists ""+test.getAbsolutePath()+""!"");
+ }
+ try {
+ KeyStore keystore = KeyStore.getInstance(keystoreType);
+ keystore.load(null, password);
+ OutputStream out = new BufferedOutputStream(new FileOutputStream(test));
+ keystore.store(out, password);
+ out.flush();
+ out.close();
+ return getKeystore(name, keystoreType);
+ } catch (KeyStoreException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ } catch (IOException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ } catch (NoSuchAlgorithmException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ } catch (CertificateException e) {
+ throw new KeystoreException(""Unable to create keystore"", e);
+ }
+ }
+
+"
+1," public void setDefaultLockoutPolicy(LockoutPolicy defaultLockoutPolicy) {
+ this.defaultLockoutPolicy = defaultLockoutPolicy;
+ }
+"
+1," protected void initializeFilters(int maxTrailerSize) {
+ // Create and add the identity filters.
+ getInputBuffer().addFilter(new IdentityInputFilter());
+ getOutputBuffer().addFilter(new IdentityOutputFilter());
+
+ // Create and add the chunked filters.
+ getInputBuffer().addFilter(new ChunkedInputFilter(maxTrailerSize));
+ getOutputBuffer().addFilter(new ChunkedOutputFilter());
+
+ // Create and add the void filters.
+ getInputBuffer().addFilter(new VoidInputFilter());
+ getOutputBuffer().addFilter(new VoidOutputFilter());
+
+ // Create and add buffered input filter
+ getInputBuffer().addFilter(new BufferedInputFilter());
+
+ // Create and add the chunked filters.
+ //getInputBuffer().addFilter(new GzipInputFilter());
+ getOutputBuffer().addFilter(new GzipOutputFilter());
+
+ pluggableFilterIndex = getInputBuffer().getFilters().length;
+ }
+
+
+ /**
+ * Add an input filter to the current request.
+ *
+ * @return false if the encoding was not found (which would mean it is
+ * unsupported)
+ */
+"
+1," private BeanReference createSecurityFilterChainBean(Element element,
+ ParserContext pc, List<?> filterChain) {
+ BeanMetadataElement filterChainMatcher;
+
+ String requestMatcherRef = element.getAttribute(ATT_REQUEST_MATCHER_REF);
+ String filterChainPattern = element.getAttribute(ATT_PATH_PATTERN);
+
+ if (StringUtils.hasText(requestMatcherRef)) {
+ if (StringUtils.hasText(filterChainPattern)) {
+ pc.getReaderContext().error(
+ ""You can't define a pattern and a request-matcher-ref for the ""
+ + ""same filter chain"", pc.extractSource(element));
+ }
+ filterChainMatcher = new RuntimeBeanReference(requestMatcherRef);
+
+ }
+ else if (StringUtils.hasText(filterChainPattern)) {
+ filterChainMatcher = MatcherType.fromElement(element).createMatcher(
+ filterChainPattern, null);
+ }
+ else {
+ filterChainMatcher = new RootBeanDefinition(AnyRequestMatcher.class);
+ }
+
+ BeanDefinitionBuilder filterChainBldr = BeanDefinitionBuilder
+ .rootBeanDefinition(DefaultSecurityFilterChain.class);
+ filterChainBldr.addConstructorArgValue(filterChainMatcher);
+ filterChainBldr.addConstructorArgValue(filterChain);
+
+ BeanDefinition filterChainBean = filterChainBldr.getBeanDefinition();
+
+ String id = element.getAttribute(""name"");
+ if (!StringUtils.hasText(id)) {
+ id = element.getAttribute(""id"");
+ if (!StringUtils.hasText(id)) {
+ id = pc.getReaderContext().generateBeanName(filterChainBean);
+ }
+ }
+
+ pc.registerBeanComponent(new BeanComponentDefinition(filterChainBean, id));
+
+ return new RuntimeBeanReference(id);
+ }
+
+"
+1," public boolean check(String path, Resource resource)
+ {
+ int slash = path.lastIndexOf('/');
+ if (slash<0)
+ return false;
+ String suffix=path.substring(slash);
+ return resource.getAlias().toString().endsWith(suffix);
+ }
+ }
+}
+"
+1," public Mapper getMapper() {
+
+ return (mapper);
+
+ }
+
+
+ /**
+ * Return the maximum size of a POST which will be automatically
+ * parsed by the container.
+ */
+"
+1," public Object getValue(Object parent) {
+ return ( (Object[]) parent )[index];
+ }
+
+ @Override
+"
+1," public String createDB(String dbName) {
+ String result = DB_CREATED_MSG + "": "" + dbName;
+
+ Connection conn = null;
+ try {
+ conn = DerbyConnectionUtil.getDerbyConnection(dbName,
+ DerbyConnectionUtil.CREATE_DB_PROP);
+ } catch (Throwable e) {
+ if (e instanceof SQLException) {
+ result = getSQLError((SQLException) e);
+ } else {
+ result = e.getMessage();
+ }
+ } finally {
+ // close DB connection
+ try {
+ if (conn != null) {
+ conn.close();
+ }
+ } catch (SQLException e) {
+ result = ""Problem closing DB connection"";
+ }
+ }
+
+ return result;
+ }
+
+"
+1," public URL resolveConfig(String path) throws FailedToResolveConfigException {
+ String origPath = path;
+ // first, try it as a path on the file system
+ File f1 = new File(path);
+ if (f1.exists()) {
+ try {
+ return f1.toURI().toURL();
+ } catch (MalformedURLException e) {
+ throw new FailedToResolveConfigException(""Failed to resolve path ["" + f1 + ""]"", e);
+ }
+ }
+ if (path.startsWith(""/"")) {
+ path = path.substring(1);
+ }
+ // next, try it relative to the config location
+ File f2 = new File(configFile, path);
+ if (f2.exists()) {
+ try {
+ return f2.toURI().toURL();
+ } catch (MalformedURLException e) {
+ throw new FailedToResolveConfigException(""Failed to resolve path ["" + f2 + ""]"", e);
+ }
+ }
+ // try and load it from the classpath directly
+ URL resource = settings.getClassLoader().getResource(path);
+ if (resource != null) {
+ return resource;
+ }
+ // try and load it from the classpath with config/ prefix
+ if (!path.startsWith(""config/"")) {
+ resource = settings.getClassLoader().getResource(""config/"" + path);
+ if (resource != null) {
+ return resource;
+ }
+ }
+ throw new FailedToResolveConfigException(""Failed to resolve config path ["" + origPath + ""], tried file path ["" + f1 + ""], path file ["" + f2 + ""], and classpath"");
+ }
+"
+1," public static boolean normalize(MessageBytes uriMB) {
+
+ ByteChunk uriBC = uriMB.getByteChunk();
+ byte[] b = uriBC.getBytes();
+ int start = uriBC.getStart();
+ int end = uriBC.getEnd();
+
+ // URL * is acceptable
+ if ((end - start == 1) && b[start] == (byte) '*')
+ return true;
+
+ int pos = 0;
+ int index = 0;
+
+ // Replace '\' with '/'
+ // Check for null byte
+ for (pos = start; pos < end; pos++) {
+ if (b[pos] == (byte) '\\')
+ b[pos] = (byte) '/';
+ if (b[pos] == (byte) 0)
+ return false;
+ }
+
+ // The URL must start with '/'
+ if (b[start] != (byte) '/') {
+ return false;
+ }
+
+ // Replace ""//"" with ""/""
+ for (pos = start; pos < (end - 1); pos++) {
+ if (b[pos] == (byte) '/') {
+ while ((pos + 1 < end) && (b[pos + 1] == (byte) '/')) {
+ copyBytes(b, pos, pos + 1, end - pos - 1);
+ end--;
+ }
+ }
+ }
+
+ // If the URI ends with ""/."" or ""/.."", then we append an extra ""/""
+ // Note: It is possible to extend the URI by 1 without any side effect
+ // as the next character is a non-significant WS.
+ if (((end - start) >= 2) && (b[end - 1] == (byte) '.')) {
+ if ((b[end - 2] == (byte) '/')
+ || ((b[end - 2] == (byte) '.')
+ && (b[end - 3] == (byte) '/'))) {
+ b[end] = (byte) '/';
+ end++;
+ }
+ }
+
+ uriBC.setEnd(end);
+
+ index = 0;
+
+ // Resolve occurrences of ""/./"" in the normalized path
+ while (true) {
+ index = uriBC.indexOf(""/./"", 0, 3, index);
+ if (index < 0)
+ break;
+ copyBytes(b, start + index, start + index + 2,
+ end - start - index - 2);
+ end = end - 2;
+ uriBC.setEnd(end);
+ }
+
+ index = 0;
+
+ // Resolve occurrences of ""/../"" in the normalized path
+ while (true) {
+ index = uriBC.indexOf(""/../"", 0, 4, index);
+ if (index < 0)
+ break;
+ // Prevent from going outside our context
+ if (index == 0)
+ return false;
+ int index2 = -1;
+ for (pos = start + index - 1; (pos >= 0) && (index2 < 0); pos --) {
+ if (b[pos] == (byte) '/') {
+ index2 = pos;
+ }
+ }
+ copyBytes(b, start + index2, start + index + 3,
+ end - start - index - 3);
+ end = end + index2 - index - 3;
+ uriBC.setEnd(end);
+ index = index2;
+ }
+
+ uriBC.setBytes(b, start, end);
+
+ return true;
+
+ }
+
+
+ // ------------------------------------------------------ Protected Methods
+
+
+ /**
+ * Copy an array of bytes to a different position. Used during
+ * normalization.
+ */
+"
+1," protected void handShake() throws IOException {
+ ssl.setNeedClientAuth(true);
+ ssl.startHandshake();
+ }
+ /**
+ * Copied from org.apache.catalina.valves.CertificateValve
+ */
+"
+1," public void testIssue1599() throws Exception
+ {
+ final String JSON = aposToQuotes(
+ ""{'id': 124,\n""
++"" 'obj':[ 'com.sun.org.apache.xalan.internal.xsltc.trax.TemplatesImpl',\n""
++"" {\n""
++"" 'transletBytecodes' : [ 'AAIAZQ==' ],\n""
++"" 'transletName' : 'a.b',\n""
++"" 'outputProperties' : { }\n""
++"" }\n""
++"" ]\n""
++""}""
+ );
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.enableDefaultTyping();
+ try {
+ mapper.readValue(JSON, Bean1599.class);
+ fail(""Should not pass"");
+ } catch (JsonMappingException e) {
+ verifyException(e, ""Illegal type"");
+ verifyException(e, ""to deserialize"");
+ verifyException(e, ""prevented for security reasons"");
+ }
+ }
+"
+1," public void testWildcardBehaviour_snapshotRestore() throws Exception {
+ createIndex(""foobar"");
+ ensureGreen(""foobar"");
+ waitForRelocation();
+
+ PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository(""dummy-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder().put(""location"", newTempDir())).get();
+ assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
+ client().admin().cluster().prepareCreateSnapshot(""dummy-repo"", ""snap1"").setWaitForCompletion(true).get();
+
+ IndicesOptions options = IndicesOptions.fromOptions(false, false, true, false);
+ verify(snapshot(""snap2"", ""foo*"", ""bar*"").setIndicesOptions(options), true);
+ verify(restore(""snap1"", ""foo*"", ""bar*"").setIndicesOptions(options), true);
+
+ options = IndicesOptions.strictExpandOpen();
+ verify(snapshot(""snap2"", ""foo*"", ""bar*"").setIndicesOptions(options), false);
+ verify(restore(""snap2"", ""foo*"", ""bar*"").setIndicesOptions(options), false);
+
+ assertAcked(prepareCreate(""barbaz""));
+ //TODO: temporary work-around for #5531
+ ensureGreen(""barbaz"");
+ waitForRelocation();
+ options = IndicesOptions.fromOptions(false, false, true, false);
+ verify(snapshot(""snap3"", ""foo*"", ""bar*"").setIndicesOptions(options), false);
+ verify(restore(""snap3"", ""foo*"", ""bar*"").setIndicesOptions(options), false);
+
+ options = IndicesOptions.fromOptions(false, false, true, false);
+ verify(snapshot(""snap4"", ""foo*"", ""baz*"").setIndicesOptions(options), true);
+ verify(restore(""snap3"", ""foo*"", ""baz*"").setIndicesOptions(options), true);
+ }
+
+ @Test
+"
+1," public synchronized JettyHttpBinding getJettyBinding(HttpClient httpClient) {
+ if (jettyBinding == null) {
+ jettyBinding = new DefaultJettyHttpBinding();
+ jettyBinding.setHeaderFilterStrategy(getHeaderFilterStrategy());
+ jettyBinding.setThrowExceptionOnFailure(isThrowExceptionOnFailure());
+ jettyBinding.setTransferException(isTransferException());
+ jettyBinding.setOkStatusCodeRange(getOkStatusCodeRange());
+ }
+ return jettyBinding;
+ }
+
+"
+1," protected final String getWindowOpenJavaScript()
+ {
+ AppendingStringBuffer buffer = new AppendingStringBuffer(500);
+
+ if (isCustomComponent() == true)
+ {
+ buffer.append(""var element = document.getElementById(\"""");
+ buffer.append(getContentMarkupId());
+ buffer.append(""\"");\n"");
+ }
+
+ buffer.append(""var settings = new Object();\n"");
+
+ appendAssignment(buffer, ""settings.minWidth"", getMinimalWidth());
+ appendAssignment(buffer, ""settings.minHeight"", getMinimalHeight());
+ appendAssignment(buffer, ""settings.className"", getCssClassName());
+ appendAssignment(buffer, ""settings.width"", getInitialWidth());
+
+ if ((isUseInitialHeight() == true) || (isCustomComponent() == false))
+ {
+ appendAssignment(buffer, ""settings.height"", getInitialHeight());
+ }
+ else
+ {
+ buffer.append(""settings.height=null;\n"");
+ }
+
+ appendAssignment(buffer, ""settings.resizable"", isResizable());
+
+ if (isResizable() == false)
+ {
+ appendAssignment(buffer, ""settings.widthUnit"", getWidthUnit());
+ appendAssignment(buffer, ""settings.heightUnit"", getHeightUnit());
+ }
+
+ if (isCustomComponent() == false)
+ {
+ Page page = createPage();
+ if (page == null)
+ {
+ throw new WicketRuntimeException(""Error creating page for modal dialog."");
+ }
+ CharSequence pageUrl;
+ RequestCycle requestCycle = RequestCycle.get();
+
+ page.getSession().getPageManager().touchPage(page);
+ if (page.isPageStateless())
+ {
+ pageUrl = requestCycle.urlFor(page.getClass(), page.getPageParameters());
+ }
+ else
+ {
+ IRequestHandler handler = new RenderPageRequestHandler(new PageProvider(page));
+ pageUrl = requestCycle.urlFor(handler);
+ }
+
+ appendAssignment(buffer, ""settings.src"", pageUrl);
+ }
+ else
+ {
+ buffer.append(""settings.element=element;\n"");
+ }
+
+ if (getCookieName() != null)
+ {
+ appendAssignment(buffer, ""settings.cookieId"", getCookieName());
+ }
+
+ Object title = getTitle() != null ? getTitle().getObject() : null;
+ if (title != null)
+ {
+ appendAssignment(buffer, ""settings.title"", escapeQuotes(title.toString()));
+ }
+
+ if (getMaskType() == MaskType.TRANSPARENT)
+ {
+ buffer.append(""settings.mask=\""transparent\"";\n"");
+ }
+ else if (getMaskType() == MaskType.SEMI_TRANSPARENT)
+ {
+ buffer.append(""settings.mask=\""semi-transparent\"";\n"");
+ }
+
+ appendAssignment(buffer, ""settings.autoSize"", autoSize);
+
+ appendAssignment(buffer, ""settings.unloadConfirmation"", showUnloadConfirmation());
+
+ // set true if we set a windowclosedcallback
+ boolean haveCloseCallback = false;
+
+ // in case the user is interested in a window close callback, or we have a pagemap
+ // to clean, attach a notification request
+ if (windowClosedCallback != null)
+ {
+ WindowClosedBehavior behavior = getBehaviors(WindowClosedBehavior.class).get(0);
+ buffer.append(""settings.onClose = function() { "");
+ buffer.append(behavior.getCallbackScript());
+ buffer.append("" };\n"");
+
+ haveCloseCallback = true;
+ }
+
+ // in case we didn't set a windowClosedCallback, we need at least a callback on the close
+ // button, to close the window properly (thus cleaning the shown flag)
+ if ((closeButtonCallback != null) || (haveCloseCallback == false))
+ {
+ CloseButtonBehavior behavior = getBehaviors(CloseButtonBehavior.class).get(0);
+ buffer.append(""settings.onCloseButton = function() { "");
+ buffer.append(behavior.getCallbackScript());
+ buffer.append("";return false;};\n"");
+ }
+
+ postProcessSettings(buffer);
+
+ buffer.append(getShowJavaScript());
+ return buffer.toString();
+ }
+
+ /**
+ *
+ * @param buffer
+ * @param key
+ * @param value
+ */
+"
+1," private void addUserFromChangeSet(ChangeLogSet.Entry change, Set to, Set cc, Set bcc, EnvVars env, TaskListener listener, RecipientProviderUtilities.IDebug debug) {
+ User user = change.getAuthor();
+ RecipientProviderUtilities.addUsers(Collections.singleton(user), listener, env, to, cc, bcc, debug);
+ }
+
+ @Extension
+"
+1," public final void parse(Set mappingStreams) {
+ try {
+ JAXBContext jc = JAXBContext.newInstance( ConstraintMappingsType.class );
+
+ Set alreadyProcessedConstraintDefinitions = newHashSet();
+ for ( InputStream in : mappingStreams ) {
+ String schemaVersion = xmlParserHelper.getSchemaVersion( ""constraint mapping file"", in );
+ String schemaResourceName = getSchemaResourceName( schemaVersion );
+ Schema schema = xmlParserHelper.getSchema( schemaResourceName );
+
+ Unmarshaller unmarshaller = jc.createUnmarshaller();
+ unmarshaller.setSchema( schema );
+
+ ConstraintMappingsType mapping = getValidationConfig( in, unmarshaller );
+ String defaultPackage = mapping.getDefaultPackage();
+
+ parseConstraintDefinitions(
+ mapping.getConstraintDefinition(),
+ defaultPackage,
+ alreadyProcessedConstraintDefinitions
+ );
+
+ for ( BeanType bean : mapping.getBean() ) {
+ Class<?> beanClass = ClassLoadingHelper.loadClass( bean.getClazz(), defaultPackage );
+ checkClassHasNotBeenProcessed( processedClasses, beanClass );
+
+ // update annotation ignores
+ annotationProcessingOptions.ignoreAnnotationConstraintForClass(
+ beanClass,
+ bean.getIgnoreAnnotations()
+ );
+
+ ConstrainedType constrainedType = ConstrainedTypeBuilder.buildConstrainedType(
+ bean.getClassType(),
+ beanClass,
+ defaultPackage,
+ constraintHelper,
+ annotationProcessingOptions,
+ defaultSequences
+ );
+ if ( constrainedType != null ) {
+ addConstrainedElement( beanClass, constrainedType );
+ }
+
+ Set constrainedFields = ConstrainedFieldBuilder.buildConstrainedFields(
+ bean.getField(),
+ beanClass,
+ defaultPackage,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedFields );
+
+ Set constrainedGetters = ConstrainedGetterBuilder.buildConstrainedGetters(
+ bean.getGetter(),
+ beanClass,
+ defaultPackage,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedGetters );
+
+ Set constrainedConstructors = ConstrainedExecutableBuilder.buildConstructorConstrainedExecutable(
+ bean.getConstructor(),
+ beanClass,
+ defaultPackage,
+ parameterNameProvider,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedConstructors );
+
+ Set constrainedMethods = ConstrainedExecutableBuilder.buildMethodConstrainedExecutable(
+ bean.getMethod(),
+ beanClass,
+ defaultPackage,
+ parameterNameProvider,
+ constraintHelper,
+ annotationProcessingOptions
+ );
+ addConstrainedElements( beanClass, constrainedMethods );
+
+ processedClasses.add( beanClass );
+ }
+ }
+ }
+ catch ( JAXBException e ) {
+ throw log.getErrorParsingMappingFileException( e );
+ }
+ }
+
+"
+1," public void addRecipients(final ExtendedEmailPublisherContext context, EnvVars env, Set to, Set cc, Set bcc) {
+ final class Debug implements RecipientProviderUtilities.IDebug {
+ private final ExtendedEmailPublisherDescriptor descriptor
+ = Jenkins.getActiveInstance().getDescriptorByType(ExtendedEmailPublisherDescriptor.class);
+
+ private final PrintStream logger = context.getListener().getLogger();
+
+ public void send(final String format, final Object... args) {
+ descriptor.debug(logger, format, args);
+ }
+ }
+ final Debug debug = new Debug();
+ Set<User> users = RecipientProviderUtilities.getChangeSetAuthors(Collections.<Run<?, ?>>singleton(context.getRun()), debug);
+ RecipientProviderUtilities.addUsers(users, context.getListener(), env, to, cc, bcc, debug);
+ }
+
+ @Extension
+"
+1," private boolean evaluate(String text) {
+ try {
+ InputSource inputSource = new InputSource(new StringReader(text));
+ return ((Boolean)expression.evaluate(inputSource, XPathConstants.BOOLEAN)).booleanValue();
+ } catch (XPathExpressionException e) {
+ return false;
+ }
+ }
+
+ @Override
+"
+1," public void basicWorkFlowTest() throws Exception {
+ Client client = client();
+
+ logger.info(""--> creating repository"");
+ assertAcked(client.admin().cluster().preparePutRepository(""test-repo"")
+ .setType(""fs"").setSettings(ImmutableSettings.settingsBuilder()
+ .put(""location"", newTempDir())
+ .put(""compress"", randomBoolean())
+ .put(""chunk_size"", randomIntBetween(100, 1000))));
+
+ createIndex(""test-idx-1"", ""test-idx-2"", ""test-idx-3"");
+ ensureGreen();
+
+ logger.info(""--> indexing some data"");
+ for (int i = 0; i < 100; i++) {
+ index(""test-idx-1"", ""doc"", Integer.toString(i), ""foo"", ""bar"" + i);
+ index(""test-idx-2"", ""doc"", Integer.toString(i), ""foo"", ""baz"" + i);
+ index(""test-idx-3"", ""doc"", Integer.toString(i), ""foo"", ""baz"" + i);
+ }
+ refresh();
+ assertHitCount(client.prepareCount(""test-idx-1"").get(), 100L);
+ assertHitCount(client.prepareCount(""test-idx-2"").get(), 100L);
+ assertHitCount(client.prepareCount(""test-idx-3"").get(), 100L);
+
+ ListenableActionFuture flushResponseFuture = null;
+ if (randomBoolean()) {
+ ArrayList indicesToFlush = newArrayList();
+ for (int i = 1; i < 4; i++) {
+ if (randomBoolean()) {
+ indicesToFlush.add(""test-idx-"" + i);
+ }
+ }
+ if (!indicesToFlush.isEmpty()) {
+ String[] indices = indicesToFlush.toArray(new String[indicesToFlush.size()]);
+ logger.info(""--> starting asynchronous flush for indices {}"", Arrays.toString(indices));
+ flushResponseFuture = client.admin().indices().prepareFlush(indices).execute();
+ }
+ }
+ logger.info(""--> snapshot"");
+ CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test-idx-*"", ""-test-idx-3"").get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
+
+ assertThat(client.admin().cluster().prepareGetSnapshots(""test-repo"").setSnapshots(""test-snap"").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
+
+ logger.info(""--> delete some data"");
+ for (int i = 0; i < 50; i++) {
+ client.prepareDelete(""test-idx-1"", ""doc"", Integer.toString(i)).get();
+ }
+ for (int i = 50; i < 100; i++) {
+ client.prepareDelete(""test-idx-2"", ""doc"", Integer.toString(i)).get();
+ }
+ for (int i = 0; i < 100; i += 2) {
+ client.prepareDelete(""test-idx-3"", ""doc"", Integer.toString(i)).get();
+ }
+ refresh();
+ assertHitCount(client.prepareCount(""test-idx-1"").get(), 50L);
+ assertHitCount(client.prepareCount(""test-idx-2"").get(), 50L);
+ assertHitCount(client.prepareCount(""test-idx-3"").get(), 50L);
+
+ logger.info(""--> close indices"");
+ client.admin().indices().prepareClose(""test-idx-1"", ""test-idx-2"").get();
+
+ logger.info(""--> restore all indices from the snapshot"");
+ RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).execute().actionGet();
+ assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
+
+ ensureGreen();
+ for (int i=0; i<5; i++) {
+ assertHitCount(client.prepareCount(""test-idx-1"").get(), 100L);
+ assertHitCount(client.prepareCount(""test-idx-2"").get(), 100L);
+ assertHitCount(client.prepareCount(""test-idx-3"").get(), 50L);
+ }
+
+ // Test restore after index deletion
+ logger.info(""--> delete indices"");
+ cluster().wipeIndices(""test-idx-1"", ""test-idx-2"");
+ logger.info(""--> restore one index after deletion"");
+ restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot(""test-repo"", ""test-snap"").setWaitForCompletion(true).setIndices(""test-idx-*"", ""-test-idx-2"").execute().actionGet();
+ assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
+
+ ensureGreen();
+ for (int i=0; i<5; i++) {
+ assertHitCount(client.prepareCount(""test-idx-1"").get(), 100L);
+ }
+ ClusterState clusterState = client.admin().cluster().prepareState().get().getState();
+ assertThat(clusterState.getMetaData().hasIndex(""test-idx-1""), equalTo(true));
+ assertThat(clusterState.getMetaData().hasIndex(""test-idx-2""), equalTo(false));
+
+ if (flushResponseFuture != null) {
+ // Finish flush
+ flushResponseFuture.actionGet();
+ }
+ }
+
+
+ @Test
+"
+1," public Object getValue(Object parent) {
+ return parent;
+ }
+
+ @Override
+"
+1," public void handshake(Socket sock) throws IOException {
+ ((SSLSocket)sock).startHandshake();
+ }
+
+ /*
+ * Determines the SSL cipher suites to be enabled.
+ *
+ * @param requestedCiphers Comma-separated list of requested ciphers
+ * @param supportedCiphers Array of supported ciphers
+ *
+ * @return Array of SSL cipher suites to be enabled, or null if none of the
+ * requested ciphers are supported
+ */
+"
+1," ValidationException getErrorParsingMappingFileException(@Cause JAXBException e);
+
+ @Message(id = 116, value = ""%s"")
+"
+1," private static ManagedMap parseInterceptUrlsForFilterInvocationRequestMap(
+ MatcherType matcherType, List urlElts, boolean useExpressions,
+ boolean addAuthenticatedAll, ParserContext parserContext) {
+
+ ManagedMap filterInvocationDefinitionMap = new ManagedMap();
+
+ for (Element urlElt : urlElts) {
+ String access = urlElt.getAttribute(ATT_ACCESS);
+ if (!StringUtils.hasText(access)) {
+ continue;
+ }
+
+ String path = urlElt.getAttribute(ATT_PATTERN);
+
+ if (!StringUtils.hasText(path)) {
+ parserContext.getReaderContext().error(
+ ""path attribute cannot be empty or null"", urlElt);
+ }
+
+ String method = urlElt.getAttribute(ATT_HTTP_METHOD);
+ if (!StringUtils.hasText(method)) {
+ method = null;
+ }
+
+ BeanDefinition matcher = matcherType.createMatcher(path, method);
+ BeanDefinitionBuilder attributeBuilder = BeanDefinitionBuilder
+ .rootBeanDefinition(SecurityConfig.class);
+
+ if (useExpressions) {
+ logger.info(""Creating access control expression attribute '"" + access
+ + ""' for "" + path);
+ // The single expression will be parsed later by the
+ // ExpressionFilterInvocationSecurityMetadataSource
+ attributeBuilder.addConstructorArgValue(new String[] { access });
+ attributeBuilder.setFactoryMethod(""createList"");
+
+ }
+ else {
+ attributeBuilder.addConstructorArgValue(access);
+ attributeBuilder.setFactoryMethod(""createListFromCommaDelimitedString"");
+ }
+
+ if (filterInvocationDefinitionMap.containsKey(matcher)) {
+ logger.warn(""Duplicate URL defined: "" + path
+ + "". The original attribute values will be overwritten"");
+ }
+
+ filterInvocationDefinitionMap.put(matcher,
+ attributeBuilder.getBeanDefinition());
+ }
+
+ if (addAuthenticatedAll && filterInvocationDefinitionMap.isEmpty()) {
+
+ BeanDefinition matcher = matcherType.createMatcher(""/**"", null);
+ BeanDefinitionBuilder attributeBuilder = BeanDefinitionBuilder
+ .rootBeanDefinition(SecurityConfig.class);
+ attributeBuilder.addConstructorArgValue(new String[] { ""authenticated"" });
+ attributeBuilder.setFactoryMethod(""createList"");
+ filterInvocationDefinitionMap.put(matcher,
+ attributeBuilder.getBeanDefinition());
+ }
+
+ return filterInvocationDefinitionMap;
+ }
+
+"
+1," public void setApplicationContext(ApplicationContext applicationContext)
+ throws BeansException {
+ defaultWebSecurityExpressionHandler.setApplicationContext(applicationContext);
+ }
+"
+1," public void testDoEnable() throws Exception{
+ FreeStyleProject project = j.createFreeStyleProject(""project"");
+ GlobalMatrixAuthorizationStrategy auth = new GlobalMatrixAuthorizationStrategy();
+ j.jenkins.setAuthorizationStrategy(auth);
+ j.jenkins.setCrumbIssuer(null);
+ HudsonPrivateSecurityRealm realm = new HudsonPrivateSecurityRealm(false);
+ j.jenkins.setSecurityRealm(realm);
+ User user = realm.createAccount(""John Smith"", ""password"");
+ SecurityContextHolder.getContext().setAuthentication(user.impersonate());
+ project.disable();
+ try{
+ project.doEnable();
+ fail(""User should not have permission to build project"");
+ }
+ catch(Exception e){
+ if(!(e.getClass().isAssignableFrom(AccessDeniedException2.class))){
+ fail(""AccessDeniedException should be thrown."");
+ }
+ }
+ auth.add(Job.READ, user.getId());
+ auth.add(Job.CONFIGURE, user.getId());
+ auth.add(Jenkins.READ, user.getId());
+ List forms = j.createWebClient().login(user.getId(), ""password"").goTo(project.getUrl()).getForms();
+ for(HtmlForm form:forms){
+ if(""enable"".equals(form.getAttribute(""action""))){
+ j.submit(form);
+ }
+ }
+ assertFalse(""Project should be enabled."", project.isDisabled());
+ }
+
+ /**
+ * Job is unrestricted (no label); it is submitted to the queue, which spawns an on-demand slave
+ * @throws Exception
+ */
+ @Test
+"