| input | output | instruction |
|---|---|---|
| stringlengths, 205 to 73.3k | stringlengths, 64 to 73.2k | stringclasses, 1 value |
#vulnerable code
@Test
public void testRetyableMutateRow() throws InterruptedException {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
final AtomicBoolean done = new AtomicBoolean(false);
executor.submit(new Callable<Void>(){
@Override
public Void call() throws Exception {
underTest.mutateRow(request);
done.set(true);
synchronized (done) {
done.notify();
}
return null;
}
});
Thread.sleep(100);
future.set(MutateRowsResponse.getDefaultInstance());
synchronized (done) {
done.wait(1000);
}
assertTrue(done.get());
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
public void testRetyableMutateRow() throws Exception {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
when(mockFuture.get()).thenReturn(Empty.getDefaultInstance());
underTest.mutateRow(request);
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
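The patch above removes the Thread.sleep/wait-notify handshake entirely by stubbing the future so mutateRow completes on the calling thread. If the call genuinely had to run on a worker thread, a CountDownLatch gives a race-free handoff; the following sketch is hypothetical and assumes the same executor, future, underTest, and clientCallService fixtures as the test above.
@Test
public void testRetryableMutateRowWithLatch() throws Exception {
  final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
  final CountDownLatch done = new CountDownLatch(1);
  executor.submit(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      underTest.mutateRow(request); // assumed to block until the response future is set
      done.countDown();
      return null;
    }
  });
  future.set(MutateRowsResponse.getDefaultInstance());
  assertTrue("mutateRow did not complete", done.await(5, TimeUnit.SECONDS));
  verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}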
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testGetCallback() throws Exception {
when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture);
byte[] key = randomBytes(8);
FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build();
setFuture(ImmutableList.of(response));
final Callback<Result> callback = Mockito.mock(Callback.class);
List<Get> gets = Arrays.asList(new Get(key));
createExecutor(options).batchCallback(gets, new Object[1], callback);
verify(callback, times(1))
.update(
same(BatchExecutor.NO_REGION),
same(key),
argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response))));
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test
public void testGetCallback() throws Exception {
when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture);
byte[] key = randomBytes(8);
FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build();
setFuture(ImmutableList.of(response));
final Callback<Result> callback = Mockito.mock(Callback.class);
List<Get> gets = Arrays.asList(new Get(key));
createExecutor().batchCallback(gets, new Object[1], callback);
verify(callback, times(1))
.update(
same(BatchExecutor.NO_REGION),
same(key),
argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response))));
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
final Future<?> waiter;
synchronized (underTest.lock) {
waiter = underTest.isRefreshing ? underTest.futureToken : Futures.immediateFuture(null);
}
waiter.get();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 22
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
underTest.syncRefresh();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
protected boolean onOK(Metadata trailers) {
ProcessingStatus status = requestManager.onOK();
if (status == ProcessingStatus.INVALID) {
// Set an exception.
onError(INVALID_RESPONSE, trailers);
return true;
}
// There was a problem in the data found in onMessage(), so fail the RPC.
if (status == ProcessingStatus.SUCCESS || status == ProcessingStatus.NOT_RETRYABLE) {
// Set the response, with either success, or non-retryable responses.
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
return true;
}
// Perform a partial retry, if the backoff policy allows it.
long nextBackOff = getNextBackoff();
if (nextBackOff == BackOff.STOP) {
// Return the response as is, and don't retry;
rpc.getRpcMetrics().markRetriesExhasted();
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("failureCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return true;
}
performRetry(nextBackOff);
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("retryCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return false;
}
#location 19
#vulnerability type NULL_DEREFERENCE
|
#fixed code
@Override
protected boolean onOK(Metadata trailers) {
ProcessingStatus status = requestManager.onOK();
if (status == ProcessingStatus.INVALID) {
// Set an exception.
onError(INVALID_RESPONSE, trailers);
return true;
}
// There was a problem in the data found in onMessage(), so fail the RPC.
if (status == ProcessingStatus.SUCCESS || status == ProcessingStatus.NOT_RETRYABLE) {
// Set the response, with either success, or non-retryable responses.
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
return true;
}
// Perform a partial retry, if the backoff policy allows it.
Long nextBackOff = getNextBackoff();
if (nextBackOff == null) {
// Return the response as is, and don't retry;
rpc.getRpcMetrics().markRetriesExhasted();
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("failureCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return true;
}
performRetry(nextBackOff);
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("retryCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return false;
}
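The sentinel change matters because of auto-unboxing: presumably getNextBackoff() now returns a nullable Long, and assigning that result to a primitive long (as the vulnerable version did) unboxes it, so a null return throws NullPointerException before the comparison with BackOff.STOP can ever run. A minimal, self-contained illustration of the mechanism; nextBackoffOrNull is a hypothetical stand-in, not a library method.
public class UnboxingNpeDemo {
  // Hypothetical stand-in for a backoff source that signals exhaustion with null.
  static Long nextBackoffOrNull() {
    return null;
  }

  public static void main(String[] args) {
    long next = nextBackoffOrNull(); // throws NullPointerException: null cannot be unboxed to long
    System.out.println(next);        // never reached
  }
}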
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 28
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public String authority() {
return delegate.authority();
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Override
public String authority() {
return authority;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
operationTimerContext.close();
}
} else {
onError(status, trailers);
}
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
try (NonThrowingCloseable s = TRACER.withSpan(operationSpan)) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void awaitCompletion() throws InterruptedException {
boolean performedWarning = false;
lock.lock();
try {
while (!isFlushed()) {
flushedCondition.await(finishWaitMillis, TimeUnit.MILLISECONDS);
long now = clock.nanoTime();
if (now >= noSuccessWarningDeadlineNanos) {
logNoSuccessWarning(now);
resetNoSuccessWarningDeadline();
performedWarning = true;
}
}
if (performedWarning) {
LOG.info("awaitCompletion() completed");
}
} finally {
lock.unlock();
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
public void awaitCompletion() throws InterruptedException {
boolean performedWarning = false;
lock.lock();
try {
while (!isFlushed()) {
flushedCondition.await(finishWaitMillis, TimeUnit.MILLISECONDS);
long now = clock.nanoTime();
if (now >= noSuccessCheckDeadlineNanos) {
// There are unusual cases where an RPC could be completed, but we don't clean up
// the state and the locks. Try to clean up if there is a timeout.
for (RetryHandler retryHandler : outstandingRetries.values()) {
retryHandler.performRetryIfStale();
}
logNoSuccessWarning(now);
resetNoSuccessWarningDeadline();
performedWarning = true;
}
}
if (performedWarning) {
LOG.info("awaitCompletion() completed");
}
} finally {
lock.unlock();
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testRetyableMutateRow() throws InterruptedException {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
final AtomicBoolean done = new AtomicBoolean(false);
executor.submit(new Callable<Void>(){
@Override
public Void call() throws Exception {
underTest.mutateRow(request);
done.set(true);
synchronized (done) {
done.notify();
}
return null;
}
});
Thread.sleep(100);
future.set(MutateRowsResponse.getDefaultInstance());
synchronized (done) {
done.wait(1000);
}
assertTrue(done.get());
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
public void testRetyableMutateRow() throws Exception {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
when(mockFuture.get()).thenReturn(Empty.getDefaultInstance());
underTest.mutateRow(request);
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void setException(Exception exception) {
rowObserver.onError(exception);
// cleanup any state that was in RowMerger. There may be a partial row in progress which needs
// to be reset.
rowMerger = new RowMerger(rowObserver);
super.setException(exception);
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Override
public void setException(Exception exception) {
rowMerger.onError(exception);
super.setException(exception);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
protected void cancel(final String message) {
call.cancel(message, null);
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
protected void cancel(final String message) {
callWrapper.cancel(message, null);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
HeaderCacheElement syncRefresh() {
try (Closeable ss = Tracing.getTracer().spanBuilder("CredentialsRefresh").startScopedSpan()) {
return asyncRefresh().get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOG.warn("Interrupted while trying to refresh google credentials.", e);
Thread.currentThread().interrupt();
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Authentication was interrupted.")
.withCause(e)
);
} catch (ExecutionException e) {
LOG.warn("ExecutionException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("ExecutionException during Authentication.")
.withCause(e)
);
} catch (TimeoutException e) {
LOG.warn("TimeoutException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("TimeoutException during Authentication.")
.withCause(e)
);
} catch (Exception e) {
LOG.warn("Unexpected execption while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected execption during Authentication.")
.withCause(e)
);
}
}
#location 3
#vulnerability type INTERFACE_NOT_THREAD_SAFE
|
#fixed code
Future<HeaderCacheElement> asyncRefresh() {
LOG.trace("asyncRefresh");
synchronized (lock) {
try {
if (futureToken != null) {
return futureToken;
}
if (headerCache.getCacheState() == CacheState.Good) {
return Futures.immediateFuture(headerCache);
}
Future<HeaderCacheElement> future = executor.submit(new Callable<HeaderCacheElement>() {
@Override
public HeaderCacheElement call() throws Exception {
return updateToken();
}
});
if (!future.isDone()) {
this.futureToken = future;
}
return future;
} catch (RuntimeException e) {
futureToken = null;
LOG.warn("Got an unexpected exception while trying to refresh google credentials.", e);
return Futures.immediateFuture(new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected error trying to authenticate")
.withCause(e)));
}
}
}
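The corrected asyncRefresh() is a single-flight refresh: every concurrent caller joins the one in-flight Future, and scheduling is serialized under lock. A generic, hedged sketch of the same idea, detached from the credentials class; the names here are illustrative rather than taken from the library.
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

final class SingleFlight<T> {
  private final Object lock = new Object();
  private Future<T> inFlight;

  /** Returns the refresh already in progress, or starts a new one. */
  Future<T> refresh(ExecutorService executor, Callable<T> load) {
    synchronized (lock) {
      if (inFlight != null && !inFlight.isDone()) {
        return inFlight;                // join the existing refresh
      }
      inFlight = executor.submit(load); // start a new refresh while holding the lock
      return inFlight;
    }
  }
}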
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testGetCallback() throws Exception {
when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture);
byte[] key = randomBytes(8);
FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build();
setFuture(ImmutableList.of(response));
final Callback<Result> callback = Mockito.mock(Callback.class);
List<Get> gets = Arrays.asList(new Get(key));
createExecutor().batchCallback(gets, new Object[1], callback);
verify(callback, times(1))
.update(
same(BatchExecutor.NO_REGION),
same(key),
argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response))));
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test
public void testGetCallback() throws Exception {
when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture);
byte[] key = randomBytes(8);
Result response =
Result.create(
ImmutableList.<Cell>of(
new RowCell(
key,
Bytes.toBytes("family"),
Bytes.toBytes(""),
1000L,
Bytes.toBytes("value"))));
setFuture(response);
final Callback<Result> callback = Mockito.mock(Callback.class);
createExecutor().batchCallback(ImmutableList.<Row>of(new Get(key)), new Object[1], callback);
verify(callback, times(1))
.update(same(BatchExecutor.NO_REGION), same(key), argThat(matchesRow(response)));
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public static Credentials getCredentials(CredentialOptions options)
throws IOException, GeneralSecurityException {
switch (options.getCredentialType()) {
case DefaultCredentials:
return getApplicationDefaultCredential();
case P12:
P12CredentialOptions p12Options = (P12CredentialOptions) options;
return getCredentialFromPrivateKeyServiceAccount(
p12Options.getServiceAccount(), p12Options.getKeyFile());
case SuppliedCredentials:
return ((UserSuppliedCredentialOptions) options).getCredential();
case SuppliedJson:
JsonCredentialsOptions jsonCredentialsOptions = (JsonCredentialsOptions) options;
synchronized (jsonCredentialsOptions) {
if (jsonCredentialsOptions.getCachedCredentials() == null) {
jsonCredentialsOptions.setCachedCredentails(
getInputStreamCredential(jsonCredentialsOptions.getInputStream()));
}
return jsonCredentialsOptions.getCachedCredentials();
}
case None:
return null;
default:
throw new IllegalStateException(
"Cannot process Credential type: " + options.getCredentialType());
}
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
public static Credentials getCredentials(CredentialOptions options)
throws IOException, GeneralSecurityException {
return patchCredentials(getCredentialsInner(options));
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
protected void run() {
try (Scope scope = TRACER.withSpan(operationSpan)) {
rpcTimerContext = rpc.getRpcMetrics().timeRpc();
operationSpan.addAnnotation(Annotation.fromDescriptionAndAttributes("rpcStart",
ImmutableMap.of("attempt", AttributeValue.longAttributeValue(failedCount))));
Metadata metadata = new Metadata();
metadata.merge(originalMetadata);
synchronized (callLock) {
// There's a subtle race condition in RetryingStreamOperation which requires a separate
// newCall/start split. The call variable needs to be set before onMessage() happens; that
// usually will occur, but some unit tests broke with a merged newCall and start.
call = rpc.newCall(getRpcCallOptions());
rpc.start(getRetryRequest(), this, metadata, call);
}
} catch (Exception e) {
setException(e);
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
protected void run() {
try (Scope scope = TRACER.withSpan(operationSpan)) {
rpcTimerContext = rpc.getRpcMetrics().timeRpc();
operationSpan.addAnnotation(Annotation.fromDescriptionAndAttributes("rpcStart",
ImmutableMap.of("attempt", AttributeValue.longAttributeValue(failedCount))));
Metadata metadata = new Metadata();
metadata.merge(originalMetadata);
callWrapper.setCallAndStart(rpc, getRpcCallOptions(), getRetryRequest(), this, metadata);
} catch (Exception e) {
setException(e);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
HeaderCacheElement syncRefresh() {
try (Closeable ss = Tracing.getTracer().spanBuilder("CredentialsRefresh").startScopedSpan()) {
return asyncRefresh().get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOG.warn("Interrupted while trying to refresh google credentials.", e);
Thread.currentThread().interrupt();
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Authentication was interrupted.")
.withCause(e)
);
} catch (ExecutionException e) {
LOG.warn("ExecutionException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("ExecutionException during Authentication.")
.withCause(e)
);
} catch (TimeoutException e) {
LOG.warn("TimeoutException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("TimeoutException during Authentication.")
.withCause(e)
);
} catch (Exception e) {
LOG.warn("Unexpected execption while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected execption during Authentication.")
.withCause(e)
);
}
}
#location 4
#vulnerability type INTERFACE_NOT_THREAD_SAFE
|
#fixed code
Future<HeaderCacheElement> asyncRefresh() {
LOG.trace("asyncRefresh");
synchronized (lock) {
try {
if (futureToken != null) {
return futureToken;
}
if (headerCache.getCacheState() == CacheState.Good) {
return Futures.immediateFuture(headerCache);
}
Future<HeaderCacheElement> future = executor.submit(new Callable<HeaderCacheElement>() {
@Override
public HeaderCacheElement call() throws Exception {
return updateToken();
}
});
if (!future.isDone()) {
this.futureToken = future;
}
return future;
} catch (RuntimeException e) {
futureToken = null;
LOG.warn("Got an unexpected exception while trying to refresh google credentials.", e);
return Futures.immediateFuture(new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected error trying to authenticate")
.withCause(e)));
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 43
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
this.rowMerger = new RowMerger(rowObserver);
adapter = new CallToStreamObserverAdapter();
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public IBigtableDataClient getClientWrapper() {
if (options.useGCJClient()) {
if (this.dataGCJClient == null) {
synchronized (BigtableSession.this) {
try {
if (dataGCJClient == null) {
BigtableDataSettings dataSettings =
BigtableVeneerSettingsFactory.createBigtableDataSettings(options);
this.dataGCJClient = new BigtableDataGCJClient(
com.google.cloud.bigtable.data.v2.BigtableDataClient.create(dataSettings));
}
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
}
return dataGCJClient;
} else {
return new BigtableDataClientWrapper(dataClient, getDataRequestContext());
}
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
public IBigtableDataClient getClientWrapper() {
if (options.useGCJClient()) {
return dataGCJClient;
} else {
return new BigtableDataClientWrapper(dataClient, getDataRequestContext());
}
}
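The vulnerable getClientWrapper() lazily built dataGCJClient with a double-checked pattern whose first read happens outside the synchronized block; unless the field is volatile, a racing reader can observe a partially published client. The patch sidesteps the problem by constructing the client elsewhere and only returning it here. If lazy construction were still desired, the standard safe form needs a volatile field, roughly as in this hypothetical sketch (createClient() is an assumed factory, not a real API).
private volatile IBigtableDataClient dataGCJClient;

private IBigtableDataClient getOrCreateClient() throws IOException {
  IBigtableDataClient local = dataGCJClient;    // single volatile read for the fast path
  if (local == null) {
    synchronized (this) {
      local = dataGCJClient;
      if (local == null) {
        dataGCJClient = local = createClient(); // hypothetical factory; published by the volatile write
      }
    }
  }
  return local;
}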
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
operationTimerContext.close();
}
} else {
onError(status, trailers);
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
try (NonThrowingCloseable s = TRACER.withSpan(operationSpan)) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testSyncRefresh() throws IOException {
initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1);
Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState());
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
public void testSyncRefresh() throws IOException {
initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1);
Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState());
Assert.assertFalse(underTest.isRefreshing());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
/**
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception,
TimeoutException {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTime(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new String("");
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
Future<Void> future = executorService.submit(syncRefreshCallable);
// let the Thread running syncRefreshCallable() have a turn so that it can initiate the call
// to refreshAccessToken().
Thread.yield();
synchronized(lock) {
lock.notifyAll();
}
// Try to get the access token, which should be calculated at this point. There's
// a possibility that some hanging occurs in the test code. If the operation times out
// so timeout after 1 second, this will throw a TimeoutException.
future.get(1, TimeUnit.SECONDS);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing.get());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
future = executorService.submit(syncRefreshCallable);
// Let the asyncRefreshes do their thing.
Thread.yield();
// There should be a single thread kicked off by the underTest.asyncRefresh() calls about
// actually doing a refresh at this point; the other ones will have see that a refresh is in
// progress and finish the invocation of the Thread without performing a refres().. Make sure
// that at least 1 refresh process is in progress.
Assert.assertTrue(underTest.isRefreshing.get());
synchronized(lock) {
// Release the lock so that all of the async refreshing can complete.
lock.notifyAll();
}
// Wait for no more than a second to make sure that the call to underTest.syncRefresh()
// completes properly. If a second passes without syncRefresh() completing, future.get(..)
// will throw a TimeoutException.
future.get(1, TimeUnit.SECONDS);
Assert.assertFalse(underTest.isRefreshing.get());
}
#location 44
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
/**
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception,
TimeoutException {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTime(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new String("");
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing.get());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing.get());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 20
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 54
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 196
#vulnerability type RESOURCE_LEAK
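
The RESOURCE_LEAK label refers to resources such as the PrintWriter for lightGBMlist.txt and the BufferedReaders in the redirect threads, which are only closed on the happy path and leak if an exception is thrown before close() is reached. A minimal sketch of the leak-free pattern, using try-with-resources with hypothetical class and parameter names rather than the actual patch, is:

import java.io.PrintWriter;
import java.util.Map;

// Sketch only: try-with-resources closes the writer even when println() throws,
// unlike the pattern above where close() is skipped on an exception path.
class IpPortListWriterSketch {
  static void writeIpPorts(Map<String, String> ipPortMap, String fileName) throws Exception {
    try (PrintWriter writer = new PrintWriter(fileName, "UTF-8")) {
      for (String key : ipPortMap.keySet()) {
        writer.println(ipPortMap.get(key));
      }
    } // writer closed automatically here
  }
}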
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
        //TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
        //TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code; please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
        //TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
        //TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 399
#vulnerability type RESOURCE_LEAK
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
        //TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
        //TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
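The run() variants in these records all follow the same shape: prepare input files, exchange reserved ports and cluster definitions with the application master, build the child environment, exec the user command, spawn stdin/stdout/stderr redirect threads, optionally launch a board process, and then poll the child's exit status by calling exitValue() inside a sleep loop and swallowing IllegalThreadStateException until either the process ends or the heartbeat thread reports training complete. The sketch below is a hedged alternative for that polling step only, using Process.waitFor(timeout, unit) from Java 8; the class and method names are invented for illustration, and trainCompleted stands in for heartbeatThread.isXLearningTrainCompleted().

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

// Illustrative sketch only, not code from the records: wait for the child to exit
// (or for training to be reported complete) without catching IllegalThreadStateException.
final class ExitWaiter {
  // Returns the child's exit code, or -1 if training completed before the child exited.
  static int waitForExit(Process child, long pollMillis, BooleanSupplier trainCompleted)
      throws InterruptedException {
    while (!trainCompleted.getAsBoolean()) {
      if (child.waitFor(pollMillis, TimeUnit.MILLISECONDS)) {
        return child.exitValue(); // safe: waitFor returned true, so the process has terminated
      }
    }
    return -1;
  }
}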
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 399
#vulnerability type RESOURCE_LEAK
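The #location and #vulnerability type lines above appear to be static-analysis annotations attached to the input; the exact flagged statement cannot be pinned down from this dump alone. A RESOURCE_LEAK report in this method would most plausibly point at one of the Closeables that are closed only on the happy path, for example the PrintWriter for lightGBMlist.txt or inputFileList.txt, the OutputStreamWriter and GZIPOutputStream in the stdin redirect thread, or the BufferedReaders wrapped around the child process streams. As a hedged illustration only, and not the patch recorded below, the lightGBMlist.txt step could be written with try-with-resources so the writer is closed on every path; the helper class and method names are invented.

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;

// Illustrative sketch, not the recorded fix: write the collected ip:port entries
// to lightGBMlist.txt with try-with-resources, so the PrintWriter is closed even
// if println() or map iteration throws.
final class LightGbmListWriter {
  static void writeList(Map<String, String> ipPortByContainer) throws IOException {
    try (PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8")) {
      for (String containerId : ipPortByContainer.keySet()) {
        writer.println(ipPortByContainer.get(containerId));
      }
    }
  }
}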
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
} catch (EOFException e) {
finished = true;
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
}
osw.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start tensorboard process
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand = "tensorboard --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing tensorborad command:" + boardCommand);
boardReservedSocket.close();
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of tensorboard process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of tensorboard process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report tensorboard url:" + boardUrl);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 186
#vulnerability type RESOURCE_LEAK
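Editorial note, not part of the dataset record: the RESOURCE_LEAK label refers to readers the sample opens inside its redirect threads and never closes on the exception path. As a hedged illustration only, the helper below shows one way such a redirect loop can be written so the BufferedReader is released on every exit; the class name, the Consumer-based sink, and the way it would be wired to the sample's xlearningProcess streams are assumptions of this sketch, not code from the project.

// Editorial sketch: a redirect loop that cannot leak its reader.
// The helper name and Consumer-based sink are hypothetical; only the
// line-by-line drain mirrors the pattern used in the sample above.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.function.Consumer;

public final class StreamRedirector {

  /** Drains a process stream line by line; the reader is closed on all paths. */
  public static void redirect(InputStream source, Consumer<String> sink) {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(source))) {
      String line;
      while ((line = reader.readLine()) != null) {
        sink.accept(line);
      }
    } catch (IOException e) {
      // Matches the sample's behaviour of logging and swallowing the error.
      e.printStackTrace();
    }
  }
}

A redirect thread would then call, for example, StreamRedirector.redirect(xlearningProcess.getInputStream(), LOG::info) instead of managing the reader by hand; note that the fixed code that follows keeps the original hand-rolled loops.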
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()),
"LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort,
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
} catch (EOFException e) {
finished = true;
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
}
osw.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start tensorboard process
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand = "tensorboard --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing tensorborad command:" + boardCommand);
boardReservedSocket.close();
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of tensorboard process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of tensorboard process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report tensorboard url:" + boardUrl);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 196
#vulnerability type RESOURCE_LEAK
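Editorial note, not part of the dataset record: in this record the stdin-redirect thread additionally opens an OutputStreamWriter over the child's stdin and a GZIPOutputStream for a local cache file, and both are only closed at the end of the try block, so an earlier exception leaks them. The sketch below is a hedged illustration of closing both through try-with-resources; the class name, the Iterable-based input, and the cache file name are assumptions made for the example.

// Editorial sketch: both streams are declared in the try-with-resources header,
// so they are closed even if a write throws part-way through.
// The cache file name and the Iterable source are placeholders.
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public final class CachedStdinFeeder {

  /** Writes each record to the child's stdin and mirrors it into a gzip cache. */
  public static void feed(OutputStream childStdin, Iterable<String> records) throws IOException {
    try (OutputStreamWriter osw = new OutputStreamWriter(childStdin, StandardCharsets.UTF_8);
         GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream("inputformat.cache.gz"))) {
      for (String record : records) {
        osw.write(record);
        osw.write("\n");
        gos.write(record.getBytes(StandardCharsets.UTF_8));
        gos.write("\n".getBytes(StandardCharsets.UTF_8));
      }
    }
  }
}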
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 403
#vulnerability type RESOURCE_LEAK
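Note on the RESOURCE_LEAK label: in the vulnerable block above, handles such as the PrintWriter for lightGBMlist.txt / inputFileList.txt and the BufferedReaders wrapped around the child-process streams are either never closed or only closed on the success path, so an exception thrown before close() leaks them. The sketch below is not the patch recorded in this row; it is a minimal, hypothetical illustration (the helper name and file layout are assumptions, not taken from the XLearning sources) of the usual remedy, try-with-resources, applied to a stand-in writer.

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;

public class LeakFreeWriterSketch {
  // Hypothetical helper: writes one value per line and closes the writer
  // even if an exception is thrown mid-loop, because the PrintWriter is
  // declared in try-with-resources. Not part of the XLearning sources.
  static void writeList(Map<String, String> entries, String fileName) throws IOException {
    try (PrintWriter writer = new PrintWriter(fileName, "UTF-8")) {
      for (String value : entries.values()) {
        writer.println(value);
      }
    }
  }

  public static void main(String[] args) throws IOException {
    // Usage example with placeholder data; the real code writes one
    // worker endpoint per line.
    writeList(Map.of("0", "127.0.0.1:9000"), "lightGBMlist.txt");
  }
}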
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 196
#vulnerability type RESOURCE_LEAK
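Note: this row carries the same RESOURCE_LEAK label; the redirect threads wrap xlearningProcess.getInputStream()/getErrorStream() in BufferedReaders that are not reliably closed when readLine() or the downstream write throws. The snippet below is only an illustrative sketch of a leak-free drain loop (class and method names are made up for the example, and logging is replaced by System.out), not the fix recorded in this dataset row.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public final class StreamDrainSketch {
  // Hypothetical drain loop: the reader over a child-process stream is
  // opened in try-with-resources, so it is closed whether readLine()
  // finishes normally or an IOException is thrown part-way through.
  public static void drain(InputStream processOut) {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(processOut))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}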
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 392
#vulnerability type RESOURCE_LEAK
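This record carries the same RESOURCE_LEAK class; in a method like run() above, the readers and sockets created along the way (BufferedReader over a process stream, the reserved Socket, a per-record PrintWriter) are the usual candidates when such a leak is reported, though the exact leaked object at the flagged location is not stated here. As a hedged illustration only — process, LOG, and the surrounding names are hypothetical, not from the record — a reader can be made leak-safe the same way:

// Leak-prone sketch (hypothetical): an exception while reading skips reader.close()
BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line;
while ((line = reader.readLine()) != null) {
  LOG.info(line);
}
reader.close();

// Usual remedy: try-with-resources, so the reader closes even on error
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
  String line;
  while ((line = reader.readLine()) != null) {
    LOG.info(line);
  }
}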
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 144
#vulnerability type RESOURCE_LEAK
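The RESOURCE_LEAK label above is the kind of finding a static analyzer (presumably Infer, given the report format) raises against run(): several Closeables -- the PrintWriter for lightGBMlist.txt and inputFileList.txt, the OutputStreamWriter/GZIPOutputStream and RecordReader in the stdin-redirect thread, and the BufferedReaders in the stdout/stderr redirect threads -- are closed only on the success path, so any exception thrown before close() leaks the handle. The sketch that follows is illustrative only: it is not taken from XLearning, the helper names are invented, and it is not the recorded fix. It contrasts the leak-prone shape with the try-with-resources form that releases the resource on every path.

// Illustrative sketch only -- not code from XLearning. It shows the generic
// RESOURCE_LEAK pattern reported for run(): a Writer/Reader whose close() is
// reached only when no exception is thrown. The helper names
// (writeListToFileLeaky, writeListToFileSafe, copyLines) are placeholders.
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;

public class ResourceLeakSketch {

  // Leak-prone: if anything in the loop throws, writer.close() is skipped
  // and the file handle leaks (the shape flagged by the analyzer).
  static void writeListToFileLeaky(List<String> lines, String path) throws IOException {
    PrintWriter writer = new PrintWriter(path, "UTF-8");
    for (String line : lines) {
      writer.println(line);
    }
    writer.close();
  }

  // Safe variant: try-with-resources closes the writer on every path,
  // including the exceptional one.
  static void writeListToFileSafe(List<String> lines, String path) throws IOException {
    try (PrintWriter writer = new PrintWriter(path, "UTF-8")) {
      for (String line : lines) {
        writer.println(line);
      }
    }
  }

  // The same idea applies to readers: close() belongs in try-with-resources
  // (or a finally block) rather than at the end of the happy path.
  static void copyLines(String inPath, PrintWriter out) throws IOException {
    try (BufferedReader reader = new BufferedReader(new FileReader(inPath))) {
      String line;
      while ((line = reader.readLine()) != null) {
        out.println(line);
      }
    }
  }
}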
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code; please generate the patch based on the given information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 403
#vulnerability type RESOURCE_LEAK
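As in the previous record, the RESOURCE_LEAK flag points at a handle in run() that is not released on every path: for instance, the BufferedReader wrapping the child process's stdout or stderr is never closed if readLine() throws inside a redirect thread. The fragment below is a hedged, minimal illustration of the usual remedy (try-with-resources around the reader); drain() is a hypothetical helper, not part of XLearning, and this is not the patch recorded under #fixed code.

// Illustrative sketch only -- not XLearning code. It shows one way a
// redirect thread could guarantee the BufferedReader over a process stream
// is closed even when readLine() throws; System.out.println stands in for
// the LOG.info() call used in the real code.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class RedirectSketch {
  static void drain(InputStream processStream) {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(processStream))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line); // placeholder for LOG.info(line)
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}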
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
        //TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
        //TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
         * Python scripts can load the cluster def via json.loads(os.environ["CLUSTER_DEF"])
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 403
#vulnerability type RESOURCE_LEAK
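
Note on the finding above: a RESOURCE_LEAK report typically points at a stream, writer, or socket that is not closed on every execution path; in the method above, objects such as the PrintWriter instances and the reader/writer objects in the redirect threads are closed only on the happy path. The fixed version that follows still opens and closes these resources explicitly, so the sketch below is a general illustration only, not the project's patch; the class and method names are hypothetical.

// Minimal sketch (assumption: not the XLearning patch) showing how this kind of
// leak is usually avoided with try-with-resources, which closes the writer even
// when an exception is thrown inside the block.
import java.io.IOException;
import java.io.PrintWriter;

public class WriterSketch {
  static void writeLines(Iterable<String> lines) throws IOException {
    try (PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8")) {
      for (String line : lines) {
        writer.println(line);
      }
    } // writer.close() is invoked automatically here, on success or failure
  }
}
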
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
        //TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
        //TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
         * Python scripts can load the cluster def via json.loads(os.environ["CLUSTER_DEF"])
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
        //TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
        //TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
         * Python scripts can load the cluster def via json.loads(os.environ["CLUSTER_DEF"])
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()),
"LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort,
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
} catch (EOFException e) {
finished = true;
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
}
osw.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing borad command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 234
#vulnerability type RESOURCE_LEAK
|
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()),
"LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort,
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if(j == 0 && isCache) {
if(conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if(isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
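Besides closing its writers, the fixed run() above also caches the first STREAM epoch into a local gzip file so that later epochs can be replayed without another pass over the input splits. A minimal, self-contained sketch of that caching idea follows; EpochCache and its method names are illustrative stand-ins, not XLearning classes.

import java.io.*;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

// Illustrative stand-in for the gzip caching done inline in the fixed run():
// records seen during the first epoch are appended here, then replayed later.
class EpochCache implements Closeable {
    private final File cacheFile;
    private final Writer gzWriter;

    EpochCache(File cacheFile) throws IOException {
        this.cacheFile = cacheFile;
        this.gzWriter = new OutputStreamWriter(
                new GZIPOutputStream(new FileOutputStream(cacheFile)), "UTF-8");
    }

    void append(String record) throws IOException {
        gzWriter.write(record);
        gzWriter.write('\n');
    }

    // Stream the cached records back out, e.g. into the training process stdin.
    void replayTo(Writer out) throws IOException {
        try (BufferedReader in = new BufferedReader(new InputStreamReader(
                new GZIPInputStream(new FileInputStream(cacheFile)), "UTF-8"))) {
            String line;
            while ((line = in.readLine()) != null) {
                out.write(line);
                out.write('\n');
            }
        }
    }

    @Override
    public void close() throws IOException {
        gzWriter.close();
    }
}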
#vulnerable code
private void writeToFile(String message, String path) throws IOException{
if(StringUtils.isEmpty(message) || StringUtils.isEmpty(path)){
return ;
}
RandomAccessFile rf = new RandomAccessFile(path, "rw");
rf.seek(rf.length());
rf.write(message.getBytes());
rf.close();
}
#location 4
#vulnerability type RESOURCE_LEAK
|
#fixed code
private void writeToFile(String message, String path) throws IOException {
if(StringUtils.isEmpty(message) || StringUtils.isEmpty(path)){
return ;
}
PrintWriter out = null;
try {
out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)));
out.println(message);
out.flush();
} finally {
if( null != out ) {
out.close();
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
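On a Java 7+ toolchain the same fix can be written even more compactly with try-with-resources, which closes the writer on every exit path, including when the constructor chain or println throws. This is only an alternative sketch of the pattern, not the project's shipped patch; plain null/isEmpty checks stand in for the StringUtils.isEmpty calls, and the usual java.io imports (BufferedWriter, FileWriter, PrintWriter, IOException) are assumed.

// Alternative sketch: the try-with-resources block closes `out` automatically.
private void writeToFile(String message, String path) throws IOException {
    if (message == null || message.isEmpty() || path == null || path.isEmpty()) {
        return;
    }
    try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)))) {
        out.println(message);   // append mode, like the FileWriter(path, true) in the patch above
    }
}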
#vulnerable code
private static synchronized void main(final String args, final Instrumentation inst) {
try {
// // 传递的args参数分两个部分:agentJar路径和agentArgs
// // 分别是Agent的JAR包路径和期望传递到服务端的参数
// final int index = args.indexOf(";");
// final String agentJar = args.substring(0, index);
// final String agentArgs = args.substring(index, args.length());
// 构造自定义的类加载器,尽量减少Greys对现有工程的侵蚀
final ClassLoader agentLoader = new URLClassLoader(new URL[]{new URL("file:" + JARFILE)}) {
// 这里还是放弃破坏双亲委派模型,因为接下来的编程模型太复杂
// @Override
// protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
// final Class<?> loadedClass = findLoadedClass(name);
// if (loadedClass != null) {
// return loadedClass;
// }
//
// try {
// Class<?> aClass = findClass(name);
// if (resolve) {
// resolveClass(aClass);
// }
// return aClass;
// } catch (Exception e) {
// return super.loadClass(name, resolve);
// }
// }
};
// Configure类定义
final Class<?> classOfConfigure = agentLoader.loadClass("com.github.ompc.greys.Configure");
// GaServer类定义
final Class<?> classOfGaServer = agentLoader.loadClass("com.github.ompc.greys.server.GaServer");
// 反序列化成Configure类实例
final Object objectOfConfigure = classOfConfigure.getMethod("toConfigure", String.class)
.invoke(null, args);
// JavaPid
final int javaPid = (Integer) classOfConfigure.getMethod("getJavaPid").invoke(objectOfConfigure);
// 获取GaServer单例
final Object objectOfGaServer = classOfGaServer
.getMethod("getInstance", int.class, Instrumentation.class)
.invoke(null, javaPid, inst);
// gaServer.isBind()
final boolean isBind = (Boolean) classOfGaServer.getMethod("isBind").invoke(objectOfGaServer);
if (!isBind) {
classOfGaServer.getMethod("bind", classOfConfigure).invoke(objectOfGaServer, objectOfConfigure);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
#location 39
#vulnerability type RESOURCE_LEAK
|
#fixed code
private static synchronized void main(final String args, final Instrumentation inst) {
try {
// 传递的args参数分两个部分:agentJar路径和agentArgs
// 分别是Agent的JAR包路径和期望传递到服务端的参数
final int index = args.indexOf(";");
final String agentJar = args.substring(0, index);
final String agentArgs = args.substring(index, args.length());
// 构造自定义的类加载器,尽量减少Greys对现有工程的侵蚀
final ClassLoader agentLoader = loadOrDefineClassLoader(agentJar);
// Configure类定义
final Class<?> classOfConfigure = agentLoader.loadClass("com.github.ompc.greys.Configure");
// GaServer类定义
final Class<?> classOfGaServer = agentLoader.loadClass("com.github.ompc.greys.server.GaServer");
// 反序列化成Configure类实例
final Object objectOfConfigure = classOfConfigure.getMethod("toConfigure", String.class)
.invoke(null, agentArgs);
// JavaPid
final int javaPid = (Integer) classOfConfigure.getMethod("getJavaPid").invoke(objectOfConfigure);
// 获取GaServer单例
final Object objectOfGaServer = classOfGaServer
.getMethod("getInstance", int.class, Instrumentation.class)
.invoke(null, javaPid, inst);
// gaServer.isBind()
final boolean isBind = (Boolean) classOfGaServer.getMethod("isBind").invoke(objectOfGaServer);
if (!isBind) {
classOfGaServer.getMethod("bind", classOfConfigure).invoke(objectOfGaServer, objectOfConfigure);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
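The patch replaces the inline URLClassLoader with loadOrDefineClassLoader(agentJar), whose body is not shown; the name suggests the loader is created once and reused on later attaches instead of leaking a fresh URLClassLoader each time. A rough sketch of that idea follows; the field, the double-checked locking and the host class name AgentLauncher are assumptions, not Greys internals. It needs java.io.File plus java.net.URL, URLClassLoader and MalformedURLException.

// Illustrative sketch: cache one agent classloader per JVM and reuse it.
private static volatile ClassLoader agentClassLoader;

private static ClassLoader loadOrDefineClassLoader(String agentJar) throws MalformedURLException {
    if (agentClassLoader == null) {
        synchronized (AgentLauncher.class) {      // AgentLauncher is a placeholder host class
            if (agentClassLoader == null) {
                agentClassLoader = new URLClassLoader(
                        new URL[]{new File(agentJar).toURI().toURL()});
            }
        }
    }
    return agentClassLoader;
}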
#vulnerable code
private void write(long gaSessionId, String jobId, boolean isF, String message) {
if(isF){
message += endMark;
}
if(StringUtils.isEmpty(message)){
return;
}
RandomAccessFile rf;
try {
new File(executeResultDir).mkdir();
rf = new RandomAccessFile(getExecuteFilePath(jobId), "rw");
rf.seek(rf.length());
rf.write(message.getBytes());
rf.close();
} catch (IOException e) {
logger.warn("jobFile write error!",e);
return ;
}
}
#location 18
#vulnerability type RESOURCE_LEAK
|
#fixed code
private void write(long gaSessionId, String jobId, boolean isF, String message) {
if(isF){
message += endMark;
}
if(StringUtils.isEmpty(message)){
return;
}
RandomAccessFile rf = null;
try {
new File(executeResultDir).mkdir();
rf = new RandomAccessFile(getExecuteFilePath(jobId), "rw");
rf.seek(rf.length());
rf.write(message.getBytes());
rf.close();
} catch (IOException e) {
logger.warn("jobFile write error!",e);
return ;
} finally {
if( null != rf ) {
try {
rf.close();
}catch(Exception e) {
//
}
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public static Configure toConfigure(String toString) {
final Configure configure = new Configure();
final String[] pvs = split(toString, ";");
for (String pv : pvs) {
try {
final String[] stringSplitArray = split(pv, "=");
final String p = stringSplitArray[0];
final String v = decode(stringSplitArray[1]);
final Field field = getField(Configure.class, p);
set(field, valueOf(field.getType(), v), configure);
} catch (Throwable t) {
//
}
}
return configure;
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public static Configure toConfigure(String toString) {
final Configure configure = new Configure();
final String[] pvs = split(toString, ";");
for (String pv : pvs) {
try {
final String[] stringSplitArray = split(pv, "=");
final String p = stringSplitArray[0];
final String v = decode(stringSplitArray[1]);
final Field field = getField(Configure.class, p);
if( null != field ) {
set(field, valueOf(field.getType(), v), configure);
}
} catch (Throwable t) {
//
}
}
return configure;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
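getField returning null for an unknown key is only one way this parser can break; a pair without an '=' would fail on stringSplitArray[1] as well. As a further-hardened illustration of the same guard pattern, going beyond what the shipped patch does and reusing split, decode, getField, valueOf, set and configure from the pair above, the loop body could be written like this:

// Hardened sketch of the parse loop: skip malformed pairs and unknown keys.
for (String pv : pvs) {
    final String[] kv = split(pv, "=");
    if (kv == null || kv.length != 2) {
        continue;                                  // no '=' or nothing after it: skip
    }
    final Field field = getField(Configure.class, kv[0]);
    if (field == null) {
        continue;                                  // unknown configuration key: skip
    }
    try {
        set(field, valueOf(field.getType(), decode(kv[1])), configure);
    } catch (Throwable t) {
        // one bad pair should not stop the rest from being applied
    }
}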
#vulnerable code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), SKIP_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// dump
final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
os.write(enhanceClassByteArray);
os.flush();
os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
}
#location 52
#vulnerability type RESOURCE_LEAK
|
#fixed code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, /*COMPUTE_FRAMES |*/ COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), SKIP_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// // dump
// final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
// os.write(enhanceClassByteArray);
// os.flush();
// os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
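The patch here simply comments the debug dump out. When a bytecode dump is genuinely wanted, Files.write avoids the unclosed-stream problem altogether; the sketch below is an optional, illustrative replacement for the commented block, and the greys.dump.class system property is a hypothetical switch, not an existing Greys option. It relies on java.nio.file.Files and java.nio.file.Paths.

// Illustrative leak-free dump of the enhanced bytecode (off unless the
// hypothetical -Dgreys.dump.class=true is set).
private static void dumpClassIfEnabled(byte[] enhanceClassByteArray) {
    if (!Boolean.getBoolean("greys.dump.class")) {
        return;
    }
    try {
        Files.write(Paths.get("/tmp/AgentTest.class"), enhanceClassByteArray);
    } catch (Throwable t) {
        // a failed dump must never break the class transform itself
    }
}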
#vulnerable code
@Override
public String draw() {
String content = tableView.draw();
StringBuilder sb = new StringBuilder();
// 清理多余的空格
Scanner scanner = new Scanner(content);
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
if (line != null) {
//清理一行后面多余的空格
line = StringUtils.stripEnd(line, " ");
if(line.isEmpty()){
line = " ";
}
}
sb.append(line).append('\n');
}
scanner.close();
return sb.toString();
}
#location 18
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Override
public String draw() {
return filterEmptyLine(tableView.draw());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
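The patch hides the cleanup behind a filterEmptyLine helper whose body is not part of this pair. One plausible shape for it, with the Scanner managed by try-with-resources so it can no longer leak, is sketched below; this is a guess at the helper, not its actual source, and it assumes the same StringUtils.stripEnd and java.util.Scanner used in the snippet above.

// Plausible sketch of filterEmptyLine: trailing spaces are stripped per line,
// blank lines are kept as a single space, and the Scanner is always closed.
private String filterEmptyLine(String content) {
    final StringBuilder sb = new StringBuilder();
    try (Scanner scanner = new Scanner(content)) {
        while (scanner.hasNextLine()) {
            String line = StringUtils.stripEnd(scanner.nextLine(), " ");
            sb.append(line.isEmpty() ? " " : line).append('\n');
        }
    }
    return sb.toString();
}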
#vulnerable code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// dump
final java.io.OutputStream os = new FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
os.write(enhanceClassByteArray);
os.flush();;
os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
}
#location 52
#vulnerability type RESOURCE_LEAK
|
#fixed code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// // dump
// final java.io.OutputStream os = new FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
// os.write(enhanceClassByteArray);
// os.flush();
// os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public byte[] transform(
final ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS) {
/*
* 注意,为了自动计算帧的大小,有时必须计算两个类共同的父类。
* 缺省情况下,ClassWriter将会在getCommonSuperClass方法中计算这些,通过在加载这两个类进入虚拟机时,使用反射API来计算。
* 但是,如果你将要生成的几个类相互之间引用,这将会带来问题,因为引用的类可能还不存在。
* 在这种情况下,你可以重写getCommonSuperClass方法来解决这个问题。
*
* 通过重写 getCommonSuperClass() 方法,更正获取ClassLoader的方式,改成使用指定ClassLoader的方式进行。
* 规避了原有代码采用Object.class.getClassLoader()的方式
*/
@Override
protected String getCommonSuperClass(String type1, String type2) {
Class<?> c, d;
final ClassLoader classLoader = loader;
try {
c = Class.forName(type1.replace('/', '.'), false, classLoader);
d = Class.forName(type2.replace('/', '.'), false, classLoader);
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d)) {
return type1;
}
if (d.isAssignableFrom(c)) {
return type2;
}
if (c.isInterface() || d.isInterface()) {
return "java/lang/Object";
} else {
do {
c = c.getSuperclass();
} while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
}
};
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// dump
final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
os.write(enhanceClassByteArray);
os.flush();
os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
logger.warn("transform loader[{}]:class[{}] failed.", loader, className, t);
}
return null;
}
#location 90
#vulnerability type RESOURCE_LEAK
|
#fixed code
public byte[] transform(
final ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS) {
/*
* 注意,为了自动计算帧的大小,有时必须计算两个类共同的父类。
* 缺省情况下,ClassWriter将会在getCommonSuperClass方法中计算这些,通过在加载这两个类进入虚拟机时,使用反射API来计算。
* 但是,如果你将要生成的几个类相互之间引用,这将会带来问题,因为引用的类可能还不存在。
* 在这种情况下,你可以重写getCommonSuperClass方法来解决这个问题。
*
* 通过重写 getCommonSuperClass() 方法,更正获取ClassLoader的方式,改成使用指定ClassLoader的方式进行。
* 规避了原有代码采用Object.class.getClassLoader()的方式
*/
@Override
protected String getCommonSuperClass(String type1, String type2) {
Class<?> c, d;
final ClassLoader classLoader = loader;
try {
c = Class.forName(type1.replace('/', '.'), false, classLoader);
d = Class.forName(type2.replace('/', '.'), false, classLoader);
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d)) {
return type1;
}
if (d.isAssignableFrom(c)) {
return type2;
}
if (c.isInterface() || d.isInterface()) {
return "java/lang/Object";
} else {
do {
c = c.getSuperclass();
} while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
}
};
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// // dump
// final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
// os.write(enhanceClassByteArray);
// os.flush();
// os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
logger.warn("transform loader[{}]:class[{}] failed.", loader, className, t);
}
return null;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private void read(String jobId, int pos, RespResult respResult) {
RandomAccessFile rf;
StringBuilder sb = new StringBuilder();
int newPos = pos;
try {
rf = new RandomAccessFile(getExecuteFilePath(jobId), "r");
rf.seek(pos);
byte[] buffer = new byte[10000];
int len=0;
while ((len=rf.read(buffer))!=-1) {
newPos += len;
sb.append(new String(buffer,0,len));
}
rf.close();
} catch (IOException e) {
logger.warn("jobFile read error!");
return ;
}
respResult.setPos(newPos);
respResult.setMessage(sb.toString());
}
#location 15
#vulnerability type RESOURCE_LEAK
|
#fixed code
private void read(String jobId, int pos, RespResult respResult) {
int newPos = pos;
final StringBuilder sb = new StringBuilder();
RandomAccessFile rf = null;
try {
rf = new RandomAccessFile(getExecuteFilePath(jobId), "r");
rf.seek(pos);
byte[] buffer = new byte[10000];
int len=0;
while ((len=rf.read(buffer))!=-1) {
newPos += len;
sb.append(new String(buffer,0,len));
}
respResult.setPos(newPos);
respResult.setMessage(sb.toString());
} catch (IOException e) {
logger.warn("jobFile read error!");
return ;
} finally {
if( null != rf ) {
try {
rf.close();
}catch(Exception e) {
//
}
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
ModuleGenerator moduleGenerator = new ModuleGenerator();
InputSource in = new InputSource(new FileInputStream(manifestXml));
Document document = moduleGenerator.manifestToDocument(in);
File file = moduleGenerator.path(document, moduleName, baseDir);
file.getParentFile().mkdirs();
JavaWriter out = new JavaWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
moduleGenerator.generate(document, moduleName, out);
out.close();
}
#location 5
#vulnerability type RESOURCE_LEAK
|
#fixed code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
generate(manifestXml, moduleName, baseDir);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
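The patched main delegates to a generate(File, String, File) overload that is not shown in this pair. A plausible shape for it is sketched below purely as an illustration: the manifest stream is scoped by try-with-resources and the JavaWriter is closed in a finally block, so neither survives an exception. ModuleGenerator, JavaWriter, Document, InputSource and manifestToDocument are the same types and helpers used above.

// Illustrative guess at the extracted helper; not the project's actual method body.
static void generate(File manifestXml, String moduleName, File baseDir) throws Exception {
    ModuleGenerator moduleGenerator = new ModuleGenerator();
    Document document;
    try (InputStream in = new FileInputStream(manifestXml)) {
        document = moduleGenerator.manifestToDocument(new InputSource(in));
    }
    File file = moduleGenerator.path(document, moduleName, baseDir);
    file.getParentFile().mkdirs();
    JavaWriter out = new JavaWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
    try {
        moduleGenerator.generate(document, moduleName, out);
    } finally {
        out.close();
    }
}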
#vulnerable code
public void validate() {
Map<String, Binding<?>> allBindings;
synchronized (linker) {
linkStaticInjections();
linkEntryPoints();
allBindings = linker.linkAll();
}
new ProblemDetector().detectProblems(allBindings.values());
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
public void validate() {
Map<String, Binding<?>> allBindings = linkEverything();
new ProblemDetector().detectProblems(allBindings.values());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
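Both this pair and the plus() pair a few rows below replace their own synchronized blocks with one call to linkEverything(), which is not shown here. The sketch below is an inference from those two fixes, not the actual Dagger source: a single method owns the lock and performs all three linking steps, so every caller sees the same locking discipline.

// Inferred sketch: one method holds the linker monitor for the whole linking pass.
private Map<String, Binding<?>> linkEverything() {
    synchronized (linker) {
        linkStaticInjections();
        linkEntryPoints();
        return linker.linkAll();
    }
}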
#vulnerable code
public static String get(Type type, Annotation[] annotations, Object subject) {
Annotation qualifier = null;
for (Annotation a : annotations) {
if (!IS_QUALIFIER_ANNOTATION.get(a.annotationType())) {
continue;
}
if (qualifier != null) {
throw new IllegalArgumentException("Too many qualifier annotations on " + subject);
}
qualifier = a;
}
return get(type, qualifier);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public static String get(Type type, Annotation[] annotations, Object subject) {
return get(type, extractQualifier(annotations, subject));
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
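The flagged dereference sits inside the qualifier-scanning loop, and the patch moves that loop into an extractQualifier helper whose body is not shown. One null-tolerant way to write it is sketched below, purely as an illustration; IS_QUALIFIER_ANNOTATION is the same lookup used above.

// Illustrative sketch: Boolean.TRUE.equals() tolerates a missing/null lookup result,
// and a null annotations array simply yields no qualifier.
private static Annotation extractQualifier(Annotation[] annotations, Object subject) {
    if (annotations == null) {
        return null;
    }
    Annotation qualifier = null;
    for (Annotation a : annotations) {
        if (!Boolean.TRUE.equals(IS_QUALIFIER_ANNOTATION.get(a.annotationType()))) {
            continue;
        }
        if (qualifier != null) {
            throw new IllegalArgumentException("Too many qualifier annotations on " + subject);
        }
        qualifier = a;
    }
    return qualifier;
}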
#vulnerable code
public ObjectGraph plus(Object... modules) {
linker.linkAll();
return makeGraph(this, plugin, modules);
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
public ObjectGraph plus(Object... modules) {
linkEverything();
return makeGraph(this, plugin, modules);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
ModuleGenerator moduleGenerator = new ModuleGenerator();
InputSource in = new InputSource(new FileInputStream(manifestXml));
Document document = moduleGenerator.manifestToDocument(in);
File file = moduleGenerator.path(document, moduleName, baseDir);
file.getParentFile().mkdirs();
JavaWriter out = new JavaWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
moduleGenerator.generate(document, moduleName, out);
out.close();
}
#location 28
#vulnerability type RESOURCE_LEAK
|
#fixed code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
generate(manifestXml, moduleName, baseDir);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public boolean lock(long seckillId) {
try {
if (lockMap.get(seckillId) == null) {
lockMap.put(seckillId, new InterProcessMutex(client, ROOT_LOCK_PATH+"/"+String.valueOf(seckillId)));
}
lockMap.get(seckillId).acquire(2L, TimeUnit.SECONDS);
return true;
} catch (Exception e) {
log.error(e.getMessage(), e);
return false;
}
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public boolean lock(long seckillId) {
try {
if (threadLock.get() == null) {
Map<Long, InterProcessMutex> map = new HashMap();
map.put(seckillId,new InterProcessMutex(client,ROOT_LOCK_PATH+"/"+String.valueOf(seckillId)));
threadLock.set(map);
}else{
threadLock.get().get(seckillId).acquire(2L, TimeUnit.SECONDS);
}
return true;
} catch (Exception e) {
log.error(e.getMessage(), e);
return false;
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
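Both this fix and the next lock() pair revolve around never dereferencing a map that might still be null. On Java 8 the null branches can be removed entirely with ThreadLocal.withInitial plus computeIfAbsent; the sketch below shows that shape as an alternative illustration, not either project's shipped code. It reuses client, ROOT_LOCK_PATH and log from the snippet above and assumes imports for Map, ConcurrentHashMap, TimeUnit and Curator's InterProcessMutex.

// Alternative sketch (Java 8+): the ThreadLocal always yields a map, and
// computeIfAbsent builds the per-seckillId mutex the first time it is needed.
private final ThreadLocal<Map<Long, InterProcessMutex>> threadLock =
        ThreadLocal.withInitial(ConcurrentHashMap::new);

public boolean lock(long seckillId) {
    try {
        InterProcessMutex mutex = threadLock.get().computeIfAbsent(
                seckillId,
                id -> new InterProcessMutex(client, ROOT_LOCK_PATH + "/" + id));
        return mutex.acquire(2L, TimeUnit.SECONDS);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        return false;
    }
}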
#vulnerable code
public boolean lock(long seckillId) {
try {
Map<Long, InterProcessMutex> map;
String rootLockPath = "/goodskill";
if (threadLock.get() == null) {
map = new ConcurrentHashMap();
map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
threadLock.set(map);
} else {
if (threadLock.get().get(seckillId) == null) {
map = threadLock.get();
map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
}
}
boolean acquire = threadLock.get().get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) {
log.debug("成功获取到zk锁,秒杀id{}", seckillId);
}
return acquire;
} catch (Exception e) {
log.warn("获取zk锁异常:{}", e.getMessage());
return false;
}
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public boolean lock(long seckillId) {
try {
Map<Long, InterProcessMutex> map;
String rootLockPath = "/goodskill";
Map<Long, InterProcessMutex> processMutexMap = threadLock.get();
if (processMutexMap.get(seckillId) == null) {
processMutexMap.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
}
boolean acquire = processMutexMap.get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) {
log.debug("成功获取到zk锁,秒杀id{}", seckillId);
}
return acquire;
} catch (Exception e) {
log.warn("获取zk锁异常:{}", e.getMessage());
return false;
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private String uploadGoodsPhoto(CommonsMultipartFile file) {
final String s = "/Users/heng/java学习/";
String path = s + file.getOriginalFilename();
try {
String filePath = s;
File file_tmp = new File(filePath);
if (!file_tmp.exists() && !file_tmp.mkdirs()) {
throw new HengException("dir create error!");
}
FileOutputStream fos = new FileOutputStream(path);
InputStream is = file.getInputStream();
int b = 0;
while ((b = is.read()) != -1) {
fos.write(b);
}
fos.flush();
fos.close();
is.close();
} catch (IOException e) {
throw new HengException("上传文件异常");
}
return path;
}
#location 19
#vulnerability type RESOURCE_LEAK
|
#fixed code
private String uploadGoodsPhoto(CommonsMultipartFile file) throws IOException {
final String s = "/Users/heng/java学习/";
String path = s + file.getOriginalFilename();
FileOutputStream fos = null;
InputStream is = null;
try {
String filePath = s;
File file_tmp = new File(filePath);
if (!file_tmp.exists() && !file_tmp.mkdirs()) {
throw new HengException("dir create error!");
}
fos = new FileOutputStream(path);
is = file.getInputStream();
int b;
while ((b = is.read()) != -1) {
fos.write(b);
}
fos.flush();
} catch (IOException e) {
logger.error("error message is:", e);
throw new HengException("上传文件异常");
} finally {
if (fos!=null){
fos.close();
}
if (is!=null){
is.close();
}
}
return path;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
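The shipped fix keeps explicit finally blocks. On Java 7+ the method can shrink further by handing the byte copy to Files.copy and letting try-with-resources close the multipart stream; the sketch below is an alternative illustration only, reusing HengException and the hard-coded upload directory from the snippet above, and assuming imports for java.io and java.nio.file.

// Alternative sketch: Files.copy streams the upload to disk; the input stream is
// closed by try-with-resources, and no FileOutputStream is managed by hand.
private String uploadGoodsPhoto(CommonsMultipartFile file) throws IOException {
    final String dir = "/Users/heng/java学习/";
    File dirFile = new File(dir);
    if (!dirFile.exists() && !dirFile.mkdirs()) {
        throw new HengException("dir create error!");
    }
    String path = dir + file.getOriginalFilename();
    try (InputStream is = file.getInputStream()) {
        Files.copy(is, Paths.get(path), StandardCopyOption.REPLACE_EXISTING);
    }
    return path;
}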
#vulnerable code
public static void main(String[] args) {
log.info(">>>>> goodsKill-rpc-service 正在启动 <<<<<");
AbstractApplicationContext context= new ClassPathXmlApplicationContext(
"classpath*:META-INF/spring/spring-*.xml");
// 程序退出前优雅关闭JVM
context.registerShutdownHook();
context.start();
log.info(">>>>> goodsKill-rpc-service 启动完成 <<<<<");
}
#location 7
#vulnerability type RESOURCE_LEAK
|
#fixed code
public static void main(String[] args) {
SpringApplication.run(GoodsKillRpcServiceApplication.class);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public static void main(String[] args) throws IOException {
>>>> goodsKill-rpc-service 正在启动">
        logger.info(">>>>> goodsKill-rpc-service is starting <<<<<");
ClassPathXmlApplicationContext context= new ClassPathXmlApplicationContext(
"classpath*:META-INF/spring/spring-*.xml");
context.start();
System.in.read();
>>>> goodsKill-rpc-service 启动完成">
        logger.info(">>>>> goodsKill-rpc-service startup complete <<<<<");
}
#location 5
#vulnerability type RESOURCE_LEAK
|
#fixed code
public static void main(String[] args) throws IOException {
>>>> goodsKill-rpc-service 正在启动">
        logger.info(">>>>> goodsKill-rpc-service is starting <<<<<");
AbstractApplicationContext context= new ClassPathXmlApplicationContext(
"classpath*:META-INF/spring/spring-*.xml");
        // Gracefully shut down the JVM before the program exits
context.registerShutdownHook();
context.start();
>>>> goodsKill-rpc-service 启动完成">
        logger.info(">>>>> goodsKill-rpc-service startup complete <<<<<");
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test(expected = NullPointerException.class)
public void testCreateNull() {
new PatternList((String[]) null);
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
#fixed code
@Test(expected = NullPointerException.class)
public void testCreateNull() {
new TemplateList(null,(String[]) null);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
expr.oprand1().accept(this);
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
#location 24
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
oprand.accept(this);
if (!isConstant(expr, token, oprand))
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
CtField f = null;
boolean is_static = false;
if (expr instanceof Member) {
String name = ((Member)expr).get();
try {
f = thisClass.getField(name);
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
is_static = Modifier.isStatic(f.getModifiers());
if (!is_static)
if (inStaticMethod)
throw new CompileError(
"not available in a static method: " + name);
else
bytecode.addAload(0); // this
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER) {
f = lookupField((ASTList)e.oprand1(), (Symbol)e.oprand2());
is_static = true;
}
else if (op == '.') {
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
f = lookupField(className, (Symbol)e.oprand2());
else
badLvalue();
is_static = Modifier.isStatic(f.getModifiers());
if (is_static)
bytecode.addOpcode(POP);
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
Symbol fname = (Symbol)e.oprand2();
// it should be a static field.
try {
f = lookupField(nfe.getField(), fname);
is_static = true;
}
catch (CompileError ce) {
// EXPR might be part of a qualified class name.
throw new NoFieldException(nfe.getField() + "/"
+ fname.get(), expr);
}
}
}
else
badLvalue();
}
else
badLvalue();
resultStatic = is_static;
return f;
}
#location 33
#vulnerability type NULL_DEREFERENCE
|
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
CtField f = null;
boolean is_static = false;
if (expr instanceof Member) {
String name = ((Member)expr).get();
try {
f = thisClass.getField(name);
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
is_static = Modifier.isStatic(f.getModifiers());
if (!is_static)
if (inStaticMethod)
throw new CompileError(
"not available in a static method: " + name);
else
bytecode.addAload(0); // this
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER) {
f = lookupJavaField(((Symbol)e.oprand1()).get(),
(Symbol)e.oprand2());
is_static = true;
}
else if (op == '.') {
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
f = lookupJvmField(className, (Symbol)e.oprand2());
else
badLvalue();
is_static = Modifier.isStatic(f.getModifiers());
if (is_static)
bytecode.addOpcode(POP);
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
Symbol fname = (Symbol)e.oprand2();
// it should be a static field.
try {
f = lookupJvmField(nfe.getField(), fname);
is_static = true;
}
catch (CompileError ce) {
// EXPR might be part of a qualified class name.
throw new NoFieldException(nfe.getField() + "/"
+ fname.get(), expr);
}
}
}
else
badLvalue();
}
else
badLvalue();
resultStatic = is_static;
return f;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public InputStream openClassfile(String classname) {
try {
if (packageName == null || classname.startsWith(packageName)) {
String jarname
= directory + classname.replace('.', '/') + ".class";
URLConnection con = fetchClass0(hostname, port, jarname);
return con.getInputStream();
}
}
catch (IOException e) {}
return null; // not found
}
#location 7
#vulnerability type RESOURCE_LEAK
|
#fixed code
public InputStream openClassfile(String classname) {
try {
URLConnection con = openClassfile0(classname);
if (con != null)
return con.getInputStream();
}
catch (IOException e) {}
return null; // not found
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void setSuperclass(String superclass)
throws CannotCompileException
{
if (constPool.getClassInfo(superClass).equals("java.lang.Object")) {
if (superclass != null)
try {
superClass = constPool.addClassInfo(superclass);
setSuperclass2(superclass);
}
catch (BadBytecode e) {
throw new CannotCompileException(e);
}
}
else {
if (superclass == null)
superclass = "java.lang.Object";
renameClass(constPool.getClassInfo(superClass), superclass);
}
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void setSuperclass(String superclass)
throws CannotCompileException
{
if (superclass == null)
superclass = "java.lang.Object";
try {
superClass = constPool.addClassInfo(superclass);
LinkedList list = methods;
int n = list.size();
for (int i = 0; i < n; ++i) {
MethodInfo minfo = (MethodInfo)list.get(i);
minfo.setSuperclass(superclass);
}
}
catch (BadBytecode e) {
throw new CannotCompileException(e);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
expr.oprand1().accept(this);
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
oprand.accept(this);
if (!isConstant(expr, token, oprand))
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public CtField lookupField(ASTList className, Symbol fieldName)
throws CompileError
{
return lookupField2(Declarator.astToClassName(className, '.'),
fieldName);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public CtField lookupField(ASTList className, Symbol fieldName)
throws CompileError
{
return lookupJavaField(Declarator.astToClassName(className, '.'),
fieldName);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
expr.oprand1().accept(this);
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
oprand.accept(this);
if (!isConstant(expr, token, oprand))
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
if (expr instanceof Member) {
String name = ((Member)expr).get();
try {
return thisClass.getField(name);
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER)
return resolver.lookupField(((Symbol)e.oprand1()).get(),
(Symbol)e.oprand2());
else if (op == '.')
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
return resolver.lookupFieldByJvmName(className,
(Symbol)e.oprand2());
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
/* EXPR should be a static field.
* If EXPR might be part of a qualified class name,
* lookupFieldByJvmName2() throws NoFieldException.
*/
Symbol fname = (Symbol)e.oprand2();
return resolver.lookupFieldByJvmName2(nfe.getField(),
fname, expr);
}
}
throw new CompileError("bad filed access");
}
#location 16
#vulnerability type NULL_DEREFERENCE
|
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
if (expr instanceof Member) {
Member mem = (Member)expr;
String name = mem.get();
try {
CtField f = thisClass.getField(name);
if (Modifier.isStatic(f.getModifiers()))
mem.setField(f);
return f;
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER) {
Member mem = (Member)e.oprand2();
CtField f
= resolver.lookupField(((Symbol)e.oprand1()).get(), mem);
mem.setField(f);
return f;
}
else if (op == '.')
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
return resolver.lookupFieldByJvmName(className,
(Symbol)e.oprand2());
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
/* EXPR should be a static field.
* If EXPR might be part of a qualified class name,
* lookupFieldByJvmName2() throws NoFieldException.
*/
Member fname = (Member)e.oprand2();
String jvmClassName = nfe.getField();
CtField f = resolver.lookupFieldByJvmName2(jvmClassName,
fname, expr);
e.setOperator(MEMBER);
e.setOprand1(new Symbol(MemberResolver.jvmToJavaName(
jvmClassName)));
fname.setField(f);
return f;
}
}
throw new CompileError("bad filed access");
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public ClassFile getClassFile2() {
ClassFile cfile = classfile;
if (cfile != null)
return cfile;
if (readCounter++ > READ_THRESHOLD) {
getCounter += 2;
releaseClassFiles();
readCounter = 0;
}
if (rawClassfile != null) {
try {
classfile = new ClassFile(new DataInputStream(
new ByteArrayInputStream(rawClassfile)));
rawClassfile = null;
getCounter = GET_THRESHOLD;
return classfile;
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
}
InputStream fin = null;
try {
fin = classPool.openClassfile(getName());
if (fin == null)
throw new NotFoundException(getName());
fin = new BufferedInputStream(fin);
ClassFile cf = new ClassFile(new DataInputStream(fin));
if (!cf.getName().equals(qualifiedName))
throw new RuntimeException("cannot find " + qualifiedName + ": "
+ cf.getName() + " found in "
+ qualifiedName.replace('.', '/') + ".class");
classfile = cf;
return cf;
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString(), e);
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
finally {
if (fin != null)
try {
fin.close();
}
catch (IOException e) {}
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION
|
#fixed code
public ClassFile getClassFile2() {
ClassFile cfile = classfile;
if (cfile != null)
return cfile;
if (readCounter++ > READ_THRESHOLD) {
releaseClassFiles();
readCounter = 0;
}
if (rawClassfile != null) {
try {
classfile = new ClassFile(new DataInputStream(
new ByteArrayInputStream(rawClassfile)));
rawClassfile = null;
getCounter = GET_THRESHOLD;
return classfile;
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
}
InputStream fin = null;
try {
fin = classPool.openClassfile(getName());
if (fin == null)
throw new NotFoundException(getName());
fin = new BufferedInputStream(fin);
ClassFile cf = new ClassFile(new DataInputStream(fin));
if (!cf.getName().equals(qualifiedName))
throw new RuntimeException("cannot find " + qualifiedName + ": "
+ cf.getName() + " found in "
+ qualifiedName.replace('.', '/') + ".class");
classfile = cf;
return cf;
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString(), e);
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
finally {
if (fin != null)
try {
fin.close();
}
catch (IOException e) {}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public ClassFile getClassFile2() {
if (classfile != null)
return classfile;
try {
byte[] b = classPool.readSource(getName());
DataInputStream dis
= new DataInputStream(new ByteArrayInputStream(b));
return (classfile = new ClassFile(dis));
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString());
}
catch (IOException e) {
throw new RuntimeException(e.toString());
}
catch (CannotCompileException e) {
throw new RuntimeException(e.toString());
}
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
public ClassFile getClassFile2() {
if (classfile != null)
return classfile;
InputStream fin = null;
try {
fin = classPool.openClassfile(getName());
if (fin == null)
throw new NotFoundException(getName());
classfile = new ClassFile(new DataInputStream(fin));
return classfile;
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString());
}
catch (IOException e) {
throw new RuntimeException(e.toString());
}
finally {
if (fin != null)
try {
fin.close();
}
catch (IOException e) {}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void renameClass(String oldName, String newName) {
LongVector v = items;
int size = numOfItems;
for (int i = 1; i < size; ++i)
((ConstInfo)v.elementAt(i)).renameClass(this, oldName, newName);
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void renameClass(String oldName, String newName) {
LongVector v = items;
int size = numOfItems;
classes = new HashMap(classes.size() * 2);
for (int i = 1; i < size; ++i) {
ConstInfo ci = (ConstInfo)v.elementAt(i);
ci.renameClass(this, oldName, newName);
ci.makeHashtable(this);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void atBinExpr(BinExpr expr) throws CompileError {
int token = expr.getOperator();
int k = CodeGen.lookupBinOp(token);
if (k >= 0) {
/* arithmetic operators: +, -, *, /, %, |, ^, &, <<, >>, >>>
*/
if (token == '+') {
Expr e = atPlusExpr(expr);
if (e != null) {
/* String concatenation has been translated into
* an expression using StringBuffer.
*/
e = CallExpr.makeCall(Expr.make('.', e,
new Member("toString")), null);
expr.setLeft(e);
expr.setOprand2(null); // <---- look at this!
className = jvmJavaLangString;
}
}
else {
expr.oprand1().accept(this);
int type1 = exprType;
expr.oprand2().accept(this);
computeBinExprType(expr, token, type1);
}
}
else {
/* equation: &&, ||, ==, !=, <=, >=, <, >
*/
booleanExpr(expr);
}
}
#location 23
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void atBinExpr(BinExpr expr) throws CompileError {
int token = expr.getOperator();
int k = CodeGen.lookupBinOp(token);
if (k >= 0) {
/* arithmetic operators: +, -, *, /, %, |, ^, &, <<, >>, >>>
*/
if (token == '+') {
Expr e = atPlusExpr(expr);
if (e != null) {
/* String concatenation has been translated into
* an expression using StringBuffer.
*/
e = CallExpr.makeCall(Expr.make('.', e,
new Member("toString")), null);
expr.setOprand1(e);
expr.setOprand2(null); // <---- look at this!
className = jvmJavaLangString;
}
}
else {
ASTree left = expr.oprand1();
ASTree right = expr.oprand2();
left.accept(this);
int type1 = exprType;
right.accept(this);
if (!isConstant(expr, token, left, right))
computeBinExprType(expr, token, type1);
}
}
else {
/* equation: &&, ||, ==, !=, <=, >=, <, >
*/
booleanExpr(expr);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private void atPlusPlus(int token, ASTree oprand, Expr expr)
throws CompileError
{
boolean isPost = oprand == null; // ++i or i++?
if (isPost)
oprand = expr.oprand2();
if (oprand instanceof Variable) {
Declarator d = ((Variable)oprand).getDeclarator();
exprType = d.getType();
arrayDim = d.getArrayDim();
}
else {
if (oprand instanceof Expr) {
Expr e = (Expr)oprand;
if (e.getOperator() == ARRAY) {
atArrayRead(expr.oprand1(), expr.oprand2());
// arrayDim should be 0.
int t = exprType;
if (t == INT || t == BYTE || t == CHAR || t == SHORT)
exprType = INT;
return;
}
}
atFieldPlusPlus(oprand);
}
}
#location 17
#vulnerability type NULL_DEREFERENCE
|
#fixed code
private void atPlusPlus(int token, ASTree oprand, Expr expr)
throws CompileError
{
boolean isPost = oprand == null; // ++i or i++?
if (isPost)
oprand = expr.oprand2();
if (oprand instanceof Variable) {
Declarator d = ((Variable)oprand).getDeclarator();
exprType = d.getType();
arrayDim = d.getArrayDim();
}
else {
if (oprand instanceof Expr) {
Expr e = (Expr)oprand;
if (e.getOperator() == ARRAY) {
atArrayRead(e.oprand1(), e.oprand2());
// arrayDim should be 0.
int t = exprType;
if (t == INT || t == BYTE || t == CHAR || t == SHORT)
exprType = INT;
return;
}
}
atFieldPlusPlus(oprand);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public void renameClass(Map classnames) {
LongVector v = items;
int size = numOfItems;
for (int i = 1; i < size; ++i)
((ConstInfo)v.elementAt(i)).renameClass(this, classnames);
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public void renameClass(Map classnames) {
LongVector v = items;
int size = numOfItems;
classes = new HashMap(classes.size() * 2);
for (int i = 1; i < size; ++i) {
ConstInfo ci = (ConstInfo)v.elementAt(i);
ci.renameClass(this, classnames);
ci.makeHashtable(this);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public Class toClass()
throws NotFoundException, IOException, CannotCompileException
{
return getClassPool().toClass(this);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public Class toClass()
throws CannotCompileException
{
return toClass(Thread.currentThread().getContextClassLoader());
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
CtField f = null;
boolean is_static = false;
if (expr instanceof Member) {
String name = ((Member)expr).get();
try {
f = thisClass.getField(name);
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
is_static = Modifier.isStatic(f.getModifiers());
if (!is_static)
if (inStaticMethod)
throw new CompileError(
"not available in a static method: " + name);
else
bytecode.addAload(0); // this
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER) {
f = lookupField((ASTList)e.oprand1(), (Symbol)e.oprand2());
is_static = true;
}
else if (op == '.') {
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
f = lookupField(className, (Symbol)e.oprand2());
else
badLvalue();
is_static = Modifier.isStatic(f.getModifiers());
if (is_static)
bytecode.addOpcode(POP);
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
Symbol fname = (Symbol)e.oprand2();
// it should be a static field.
try {
f = lookupField(nfe.getField(), fname);
is_static = true;
}
catch (CompileError ce) {
// EXPR might be part of a qualified class name.
throw new NoFieldException(nfe.getField() + "/"
+ fname.get(), expr);
}
}
}
else
badLvalue();
}
else
badLvalue();
resultStatic = is_static;
return f;
}
#location 26
#vulnerability type NULL_DEREFERENCE
|
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
CtField f = null;
boolean is_static = false;
if (expr instanceof Member) {
String name = ((Member)expr).get();
try {
f = thisClass.getField(name);
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
is_static = Modifier.isStatic(f.getModifiers());
if (!is_static)
if (inStaticMethod)
throw new CompileError(
"not available in a static method: " + name);
else
bytecode.addAload(0); // this
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER) {
f = lookupJavaField(((Symbol)e.oprand1()).get(),
(Symbol)e.oprand2());
is_static = true;
}
else if (op == '.') {
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
f = lookupJvmField(className, (Symbol)e.oprand2());
else
badLvalue();
is_static = Modifier.isStatic(f.getModifiers());
if (is_static)
bytecode.addOpcode(POP);
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
Symbol fname = (Symbol)e.oprand2();
// it should be a static field.
try {
f = lookupJvmField(nfe.getField(), fname);
is_static = true;
}
catch (CompileError ce) {
// EXPR might be part of a qualified class name.
throw new NoFieldException(nfe.getField() + "/"
+ fname.get(), expr);
}
}
}
else
badLvalue();
}
else
badLvalue();
resultStatic = is_static;
return f;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private String getUrlContents(String urlString) throws Exception {
System.setProperty ("jsse.enableSNIExtension", "false");
URL url = new URL(urlString);
BufferedReader in = new BufferedReader(
new InputStreamReader(url.openStream()));
String inputLine;
StringBuilder contents = new StringBuilder();
while ((inputLine = in.readLine()) != null)
contents.append(inputLine);
in.close();
return contents.toString();
}
#location 12
#vulnerability type RESOURCE_LEAK
|
#fixed code
private String getUrlContents(String urlString) throws Exception {
System.setProperty ("jsse.enableSNIExtension", "false");
URL url = new URL(urlString);
URLConnection urlc = url.openConnection();
urlc.setRequestProperty("Accept", "application/json, */*");
urlc.connect();
StringBuilder contents = new StringBuilder();
InputStream in = urlc.getInputStream();
for(int i = 0;i!= -1;i= in.read()){
char c = (char)i;
if(!Character.isISOControl(c))
contents.append((char)i);
}
in.close();
return contents.toString();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testParsingErrorPositionLargeInput() throws IOException {
// 2048 is the buffer size, this will allow us to test position
// information for large input that needs to be buffered
char[] in = new char[2048 + 7];
in[0] = '[';
for (int i = 1; i < 2046; i++) in[i] = '1';
in[2046] = ',';
in[2047] = '\n';
in[2048] = '3';
in[2049] = '3';
in[2050] = ',';
in[2051] = '\n';
in[2052] = '5';
in[2053] = 'x';
in[2054] = ']';
/* looks like :
* [11111.....111,
* 3,
* 5x]
*/
@SuppressWarnings("resource")
JsonReader reader = new JsonReader(new CharArrayReader(in), strictDoubleParse, readMetadata);
try {
System.out.println(in);
for (reader.beginArray(); reader.hasNext();) {
reader.next();
reader.valueAsDouble();
}
System.out.println(reader.valueAsInt());
fail();
} catch (JsonStreamException e) {
e.printStackTrace();
assertEquals(2, e.getRow());
assertEquals(1, e.getColumn());
}
}
#location 31
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test
public void testParsingErrorPositionLargeInput() throws IOException {
// 2048 is the buffer size, this will allow us to test position
// information for large input that needs to be buffered
char[] in = new char[2048 + 7];
in[0] = '[';
for (int i = 1; i < 2046; i++) in[i] = '1';
in[2046] = ',';
in[2047] = '\n';
in[2048] = '3';
in[2049] = '3';
in[2050] = ',';
in[2051] = '\n';
in[2052] = '5';
in[2053] = 'x';
in[2054] = ']';
/* looks like :
* [11111.....111,
* 3,
* 5x]
*/
@SuppressWarnings("resource")
JsonReader reader = new JsonReader(new CharArrayReader(in), strictDoubleParse, readMetadata);
try {
for (reader.beginArray(); reader.hasNext();) {
reader.next();
reader.valueAsDouble();
}
reader.endArray();
fail();
} catch (JsonStreamException e) {
assertEquals(2, e.getRow());
assertEquals(1, e.getColumn());
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test public void testMultipleCallsTonextObjectMetadata() throws IOException {
String src = "{\"@class\" : \"theclass\"" +
", \"@author\":\"me\"" +
", \"@comment\":\"no comment\"}";
JsonReader reader = new JsonReader(new StringReader(src));
assertEquals("theclass", reader.nextObjectMetadata().nextObjectMetadata().metadata("class"));
assertEquals("theclass", reader.nextObjectMetadata().metadata("class"));
assertEquals("no comment", reader.metadata("comment"));
assertEquals("no comment", reader.nextObjectMetadata().metadata("comment"));
assertEquals("me", reader.beginObject().metadata("author"));
reader.endObject();
}
#location 11
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test public void testMultipleCallsTonextObjectMetadata() throws IOException {
String src = "{\"@class\" : \"theclass\"" +
", \"@author\":\"me\"" +
", \"@comment\":\"no comment\"}";
JsonReader reader = new JsonReader(new StringReader(src));
assertEquals("theclass", reader.nextObjectMetadata().nextObjectMetadata().metadata("class"));
assertEquals("theclass", reader.nextObjectMetadata().metadata("class"));
assertEquals("no comment", reader.metadata("comment"));
assertEquals("no comment", reader.nextObjectMetadata().metadata("comment"));
assertEquals("me", reader.beginObject().metadata("author"));
reader.endObject();
reader.close();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public <T> String serialize(T o, GenericType<T> type) throws TransformationException,
IOException {
JsonWriter writer = new JsonWriter(new StringWriter(), skipNull, htmlSafe);
if (o == null)
nullConverter.serialize(null, writer, null);
else
serialize(o, type.getType(), writer, new Context(this));
writer.flush();
return writer.unwrap().toString();
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
public <T> String serialize(T o, GenericType<T> type) throws TransformationException,
IOException {
StringWriter sw = new StringWriter();
ObjectWriter writer = createWriter(sw);
if (o == null)
nullConverter.serialize(null, writer, null);
else
serialize(o, type.getType(), writer, new Context(this));
writer.flush();
return sw.toString();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test public void testIllegalReadObjectInstedOfArray() throws IOException {
String src = "[1,2]";
JsonReader reader = new JsonReader(new StringReader(src));
try {
reader.beginObject();
fail();
} catch (IllegalStateException ise) {}
}
#location 5
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test public void testIllegalReadObjectInstedOfArray() throws IOException {
String src = "[1,2]";
JsonReader reader = new JsonReader(new StringReader(src));
try {
reader.beginObject();
fail();
} catch (IllegalStateException ise) {}
reader.close();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testReadMalformedJson() throws IOException {
String src = "";
JsonReader reader = new JsonReader(new StringReader(src), strictDoubleParse, readMetadata);
try {
reader.beginObject();
fail();
} catch (IllegalStateException ise) {
}
reader.close();
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test
public void testReadMalformedJson() throws IOException {
String src = "";
JsonReader reader = new JsonReader(new StringReader(src), strictDoubleParse, readMetadata);
try {
reader.beginObject();
fail();
} catch (JsonStreamException ise) {
}
reader.close();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public <T> String serialize(T o) throws TransformationException, IOException {
JsonWriter writer = new JsonWriter(new StringWriter(), skipNull, htmlSafe);
if (o == null)
nullConverter.serialize(null, writer, null);
else
serialize(o, o.getClass(), writer, new Context(this));
writer.flush();
return writer.unwrap().toString();
}
#location 8
#vulnerability type RESOURCE_LEAK
|
#fixed code
public <T> String serialize(T o) throws TransformationException, IOException {
StringWriter sw = new StringWriter();
ObjectWriter writer = createWriter(sw);
if (o == null)
nullConverter.serialize(null, writer, null);
else
serialize(o, o.getClass(), writer, new Context(this));
writer.flush();
return sw.toString();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testIllegalReadObjectInstedOfArray() throws IOException {
String src = "[1,2]";
JsonReader reader = new JsonReader(new StringReader(src), strictDoubleParse, readMetadata);
try {
reader.beginObject();
fail();
} catch (IllegalStateException ise) {
}
reader.close();
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test
public void testIllegalReadObjectInstedOfArray() throws IOException {
String src = "[1,2]";
JsonReader reader = new JsonReader(new StringReader(src), strictDoubleParse, readMetadata);
try {
reader.beginObject();
fail();
} catch (JsonStreamException ise) {
}
reader.close();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public boolean valueAsBoolean() throws IOException {
if (BOOLEAN == valueType) {
return _booleanValue;
}
if (STRING == valueType)
return "".equals(_stringValue) ? null : Boolean.valueOf(_stringValue);
if (NULL == valueType) return false;
throw new IllegalStateException("Readen value is not of type boolean");
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
#fixed code
public boolean valueAsBoolean() throws IOException {
if (BOOLEAN == valueType) {
return _booleanValue;
}
if (STRING == valueType)
return Boolean.parseBoolean(_stringValue);
if (NULL == valueType) return false;
throw new IllegalStateException("Readen value is not of type boolean");
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private final void newMisplacedTokenException(int cursor) {
if (_buflen < 0)
throw new IllegalStateException(
"Incomplete data or malformed json : encoutered end of stream.");
if (cursor < 0) cursor = 0;
int pos = (_position - valueAsString().length() - _buflen + cursor);
if (pos < 0) pos = 0;
throw new IllegalStateException("Encountred misplaced character '" + _buffer[cursor]
+ "' around position " + pos);
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
#fixed code
private final void newMisplacedTokenException(int cursor) {
if (_buflen < 0)
throw new IllegalStateException(
"Incomplete data or malformed json : encoutered end of stream.");
if (cursor < 0) cursor = 0;
int pos = _position - (_buflen - cursor);
if (pos < 0) pos = 0;
throw new IllegalStateException("Encountred misplaced character '" + _buffer[cursor]
+ "' around position " + pos);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public <T> String serialize(T o, Class<? extends BeanView<?>>... withViews)
throws TransformationException, IOException {
JsonWriter writer = new JsonWriter(new StringWriter(), skipNull, htmlSafe);
if (o == null)
nullConverter.serialize(null, writer, null);
else
serialize(o, o.getClass(), writer, new Context(this, Arrays.asList(withViews)));
writer.flush();
return writer.unwrap().toString();
}
#location 9
#vulnerability type RESOURCE_LEAK
|
#fixed code
public <T> String serialize(T o, Class<? extends BeanView<?>>... withViews)
throws TransformationException, IOException {
StringWriter sw = new StringWriter();
ObjectWriter writer = createWriter(sw);
if (o == null)
nullConverter.serialize(null, writer, null);
else
serialize(o, o.getClass(), writer, new Context(this, Arrays.asList(withViews)));
writer.flush();
return sw.toString();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private final void newWrongTokenException(String awaited, int cursor) {
// otherwise it fails when an error occurs on first character
if (cursor < 0) cursor = 0;
int pos = (_position - valueAsString().length() - _buflen + cursor);
if (pos < 0) pos = 0;
if (_buflen < 0)
throw new IllegalStateException(
"Incomplete data or malformed json : encoutered end of stream but expected "
+ awaited);
else
throw new IllegalStateException("Illegal character at position " + pos + " expected "
+ awaited + " but read '" + _buffer[cursor] + "' !");
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
#fixed code
private final void newWrongTokenException(String awaited, int cursor) {
// otherwise it fails when an error occurs on first character
if (cursor < 0) cursor = 0;
int pos = _position - (_buflen - cursor);
if (pos < 0) pos = 0;
if (_buflen < 0)
throw new IllegalStateException(
"Incomplete data or malformed json : encoutered end of stream but expected "
+ awaited);
else
throw new IllegalStateException("Illegal character at position " + pos + " expected "
+ awaited + " but read '" + _buffer[cursor] + "' !");
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test public void testIncompleteSource() throws IOException {
String src = "[1,";
JsonReader reader = new JsonReader(new StringReader(src));
try {
reader.beginArray();
reader.next();
reader.next();
fail();
} catch (IOException ioe) {}
}
#location 6
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test public void testIncompleteSource() throws IOException {
String src = "[1,";
JsonReader reader = new JsonReader(new StringReader(src));
try {
reader.beginArray();
reader.next();
reader.next();
fail();
} catch (IOException ioe) {}
reader.close();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
GraphHopperConfig graphHopperConfiguration = new GraphHopperConfig();
graphHopperConfiguration.putObject("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
MapMatching mapMatching = new MapMatching(graphHopper, new HintsMap());
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
#location 43
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
GraphHopperConfig graphHopperConfiguration = new GraphHopperConfig();
graphHopperConfiguration.setProfiles(Collections.singletonList(new ProfileConfig("fast_car").setVehicle("car").setWeighting("fastest")));
graphHopperConfiguration.putObject("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
MapMatching mapMatching = new MapMatching(graphHopper, new HintsMap().putObject("profile", "fast_car"));
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
CmdArgs graphHopperConfiguration = new CmdArgs();
graphHopperConfiguration.put("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.getCHFactoryDecorator().setEnabled(false);
graphHopper.getCHFactoryDecorator().setDisablingAllowed(true);
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
AlgorithmOptions algoOpts = AlgorithmOptions.start()
.maxVisitedNodes((int) 1e20)
.build();
MapMatching mapMatching = new MapMatching(graphHopper, algoOpts);
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
#location 48
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
CmdArgs graphHopperConfiguration = new CmdArgs();
graphHopperConfiguration.put("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.getCHFactoryDecorator().setEnabled(false);
graphHopper.getCHFactoryDecorator().setDisablingAllowed(true);
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
MapMatching mapMatching = new MapMatching(graphHopper, new HintsMap());
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
GraphHopperConfig graphHopperConfiguration = new GraphHopperConfig();
graphHopperConfiguration.putObject("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
MapMatching mapMatching = new MapMatching(graphHopper, new HintsMap());
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
#location 44
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
GraphHopperConfig graphHopperConfiguration = new GraphHopperConfig();
graphHopperConfiguration.setProfiles(Collections.singletonList(new ProfileConfig("fast_car").setVehicle("car").setWeighting("fastest")));
graphHopperConfiguration.putObject("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
MapMatching mapMatching = new MapMatching(graphHopper, new HintsMap().putObject("profile", "fast_car"));
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
CmdArgs graphHopperConfiguration = new CmdArgs();
graphHopperConfiguration.put("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.getCHFactoryDecorator().setEnabled(false);
graphHopper.getCHFactoryDecorator().setDisablingAllowed(true);
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
AlgorithmOptions algoOpts = AlgorithmOptions.start()
.maxVisitedNodes((int) 1e20)
.build();
MapMatching mapMatching = new MapMatching(graphHopper, algoOpts);
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
#location 49
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Override
public void run(Bootstrap bootstrap, Namespace args) {
// read and initialize arguments:
CmdArgs graphHopperConfiguration = new CmdArgs();
graphHopperConfiguration.put("graph.location", "graph-cache");
seed = args.getLong("seed");
count = args.getInt("count");
GraphHopper graphHopper = new GraphHopperOSM();
graphHopper.init(graphHopperConfiguration).forDesktop();
graphHopper.getCHFactoryDecorator().setEnabled(false);
graphHopper.getCHFactoryDecorator().setDisablingAllowed(true);
graphHopper.importOrLoad();
// and map-matching stuff
GraphHopperStorage graph = graphHopper.getGraphHopperStorage();
bbox = graph.getBounds();
LocationIndexTree locationIndex = (LocationIndexTree) graphHopper.getLocationIndex();
MapMatching mapMatching = new MapMatching(graphHopper, new HintsMap());
// start tests:
StopWatch sw = new StopWatch().start();
try {
printLocationIndexMatchQuery(locationIndex);
printTimeOfMapMatchQuery(graphHopper, mapMatching);
System.gc();
} catch (Exception ex) {
logger.error("Problem while measuring", ex);
properties.put("error", "" + ex.toString());
} finally {
properties.put("measurement.count", "" + count);
properties.put("measurement.seed", "" + seed);
properties.put("measurement.time", "" + sw.stop().getMillis());
System.gc();
properties.put("measurement.totalMB", "" + Helper.getTotalMB());
properties.put("measurement.usedMB", "" + Helper.getUsedMB());
try {
FileWriter fileWriter = new FileWriter(args.<File>get("outfile"));
for (Entry<String, String> e : properties.entrySet()) {
fileWriter.append(e.getKey());
fileWriter.append("=");
fileWriter.append(e.getValue());
fileWriter.append("\n");
}
fileWriter.flush();
} catch (IOException ex) {
logger.error(
"Problem while writing measurements", ex);
}
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Test
public void testEvaluate() {
CosineSimilarityUDF cosine = new CosineSimilarityUDF();
{
List<String> ftvec1 = Arrays.asList("bbb:1.4", "aaa:0.9", "ccc");
Assert.assertEquals(1.f, cosine.evaluate(ftvec1, ftvec1).get(), 0.0);
}
Assert.assertEquals(0.f, cosine.evaluate(Arrays.asList("a", "b", "c"), Arrays.asList("d", "e")).get(), 0.0);
Assert.assertEquals(0.f, cosine.evaluate(Arrays.asList("a", "b", "c"), Arrays.asList("d", "e")).get(), 0.0);
Assert.assertEquals(1.f, cosine.evaluate(Arrays.asList("a", "b"), Arrays.asList("a", "b")).get(), 0.0);
Assert.assertEquals(0.5f, cosine.evaluate(Arrays.asList("a", "b"), Arrays.asList("a", "c")).get(), 0.0);
Assert.assertEquals(-1.f, cosine.evaluate(Arrays.asList("a:1.0"), Arrays.asList("a:-1.0")).get(), 0.0);
Assert.assertTrue(cosine.evaluate(Arrays.asList("apple", "orange"), Arrays.asList("banana", "apple")).get() > 0.f);
Assert.assertTrue(cosine.evaluate(Arrays.asList("apple", "orange"), Arrays.asList("banana", "apple")).get() > 0.f);
Assert.assertTrue((cosine.evaluate(Arrays.asList("apple", "orange"), Arrays.asList("banana", "orange", "apple"))).get() > (cosine.evaluate(Arrays.asList("apple", "orange"), Arrays.asList("banana", "orange"))).get());
Assert.assertEquals(1.0f, cosine.evaluate(Arrays.asList("This is a sentence with seven tokens".split(" ")), Arrays.<String> asList("This is a sentence with seven tokens".split(" "))).get(), 0.0);
Assert.assertEquals(1.0f, cosine.evaluate(Arrays.asList("This is a sentence with seven tokens".split(" ")), Arrays.<String> asList("This is a sentence with seven tokens".split(" "))).get(), 0.0);
{
List<String> tokens1 = Arrays.asList("1:1,2:1,3:1,4:1,5:0,6:1,7:1,8:1,9:0,10:1,11:1".split(","));
List<String> tokens2 = Arrays.asList("1:1,2:1,3:0,4:1,5:1,6:1,7:1,8:0,9:1,10:1,11:1".split(","));
Assert.assertEquals(0.77777f, cosine.evaluate(tokens1, tokens2).get(), 0.00001f);
}
{
List<String> tokens1 = Arrays.asList("1 2 3 4 6 7 8 10 11".split("\\s+"));
List<String> tokens2 = Arrays.asList("1 2 4 5 6 7 9 10 11".split("\\s+"));
double dotp = 1 + 1 + 0 + 1 + 0 + 1 + 1 + 0 + 0 + 1 + 1;
double norm = Math.sqrt(tokens1.size()) * Math.sqrt(tokens2.size());
Assert.assertEquals(dotp / norm, cosine.evaluate(tokens1, tokens2).get(), 0.00001f);
Assert.assertEquals(dotp / norm, cosine.evaluate(tokens1, tokens2).get(), 0.00001f);
Assert.assertEquals(dotp / norm, cosine.evaluate(Arrays.asList("1", "2", "3", "4", "6", "7", "8", "10", "11"), Arrays.asList("1", "2", "4", "5", "6", "7", "9", "10", "11")).get(), 0.00001f);
}
Assert.assertEquals(0.f, cosine.evaluate(Arrays.asList("1", "2", "3"), Arrays.asList("4", "5")).get(), 0.0);
Assert.assertEquals(1.f, cosine.evaluate(Arrays.asList("1", "2"), Arrays.asList("1", "2")).get(), 0.0);
}
#location 45
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Test
public void testEvaluate() throws IOException {
{
List<String> ftvec1 = Arrays.asList("bbb:1.4", "aaa:0.9", "ccc");
Assert.assertEquals(1.f, CosineSimilarityUDF.cosineSimilarity(ftvec1, ftvec1), 0.0);
}
Assert.assertEquals(0.f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("a", "b", "c"), Arrays.asList("d", "e")), 0.0);
Assert.assertEquals(0.f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("a", "b", "c"), Arrays.asList("d", "e")), 0.0);
Assert.assertEquals(1.f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("a", "b"), Arrays.asList("a", "b")), 0.0);
Assert.assertEquals(0.5f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("a", "b"), Arrays.asList("a", "c")), 0.0);
Assert.assertEquals(-1.f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("a:1.0"), Arrays.asList("a:-1.0")), 0.0);
Assert.assertTrue(CosineSimilarityUDF.cosineSimilarity(Arrays.asList("apple", "orange"), Arrays.asList("banana", "apple")) > 0.f);
Assert.assertTrue(CosineSimilarityUDF.cosineSimilarity(Arrays.asList("apple", "orange"), Arrays.asList("banana", "apple")) > 0.f);
Assert.assertTrue((CosineSimilarityUDF.cosineSimilarity(Arrays.asList("apple", "orange"), Arrays.asList("banana", "orange", "apple"))) > (CosineSimilarityUDF.cosineSimilarity(Arrays.asList("apple", "orange"), Arrays.asList("banana", "orange"))));
Assert.assertEquals(1.0f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("This is a sentence with seven tokens".split(" ")), Arrays.<String> asList("This is a sentence with seven tokens".split(" "))), 0.0);
Assert.assertEquals(1.0f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("This is a sentence with seven tokens".split(" ")), Arrays.<String> asList("This is a sentence with seven tokens".split(" "))), 0.0);
{
List<String> tokens1 = Arrays.asList("1:1,2:1,3:1,4:1,5:0,6:1,7:1,8:1,9:0,10:1,11:1".split(","));
List<String> tokens2 = Arrays.asList("1:1,2:1,3:0,4:1,5:1,6:1,7:1,8:0,9:1,10:1,11:1".split(","));
Assert.assertEquals(0.77777f, CosineSimilarityUDF.cosineSimilarity(tokens1, tokens2), 0.00001f);
}
{
List<String> tokens1 = Arrays.asList("1 2 3 4 6 7 8 10 11".split("\\s+"));
List<String> tokens2 = Arrays.asList("1 2 4 5 6 7 9 10 11".split("\\s+"));
double dotp = 1 + 1 + 0 + 1 + 0 + 1 + 1 + 0 + 0 + 1 + 1;
double norm = Math.sqrt(tokens1.size()) * Math.sqrt(tokens2.size());
Assert.assertEquals(dotp / norm, CosineSimilarityUDF.cosineSimilarity(tokens1, tokens2), 0.00001f);
Assert.assertEquals(dotp / norm, CosineSimilarityUDF.cosineSimilarity(tokens1, tokens2), 0.00001f);
Assert.assertEquals(dotp / norm, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("1", "2", "3", "4", "6", "7", "8", "10", "11"), Arrays.asList("1", "2", "4", "5", "6", "7", "9", "10", "11")), 0.00001f);
}
Assert.assertEquals(0.f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("1", "2", "3"), Arrays.asList("4", "5")), 0.0);
Assert.assertEquals(1.f, CosineSimilarityUDF.cosineSimilarity(Arrays.asList("1", "2"), Arrays.asList("1", "2")), 0.0);
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private static int evalPredict(DecisionTree tree, double[] x) throws HiveException, IOException {
String script = tree.predictCodegen();
System.out.println(script);
TreePredictTrustedUDF udf = new TreePredictTrustedUDF();
udf.initialize(new ObjectInspector[] {
PrimitiveObjectInspectorFactory.javaStringObjectInspector,
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) });
IntWritable result = (IntWritable) udf.evaluate(script, x, true);
result = (IntWritable) udf.evaluate(script, x, true);
udf.close();
return result.get();
}
#location 11
#vulnerability type RESOURCE_LEAK
|
#fixed code
private static int evalPredict(DecisionTree tree, double[] x) throws HiveException, IOException {
String script = tree.predictCodegen();
System.out.println(script);
TreePredictByJavascriptUDF udf = new TreePredictByJavascriptUDF();
udf.initialize(new ObjectInspector[] {
PrimitiveObjectInspectorFactory.javaStringObjectInspector,
ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) });
IntWritable result = (IntWritable) udf.evaluate(script, x, true);
result = (IntWritable) udf.evaluate(script, x, true);
udf.close();
return result.get();
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private static long loadPredictionModel(PredictionModel model, File file, PrimitiveObjectInspector featureOI, WritableFloatObjectInspector weightOI, WritableFloatObjectInspector covarOI)
throws IOException, SerDeException {
long count = 0L;
if(!file.exists()) {
return count;
}
if(!file.getName().endsWith(".crc")) {
if(file.isDirectory()) {
for(File f : file.listFiles()) {
count += loadPredictionModel(model, f, featureOI, weightOI, covarOI);
}
} else {
LazySimpleSerDe serde = HiveUtils.getLineSerde(featureOI, weightOI, covarOI);
StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
StructField c1ref = lineOI.getStructFieldRef("c1");
StructField c2ref = lineOI.getStructFieldRef("c2");
StructField c3ref = lineOI.getStructFieldRef("c3");
PrimitiveObjectInspector c1oi = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
FloatObjectInspector c2oi = (FloatObjectInspector) c2ref.getFieldObjectInspector();
FloatObjectInspector c3oi = (FloatObjectInspector) c3ref.getFieldObjectInspector();
final BufferedReader reader = HadoopUtils.getBufferedReader(file);
try {
String line;
while((line = reader.readLine()) != null) {
count++;
Text lineText = new Text(line);
Object lineObj = serde.deserialize(lineText);
List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
Object f0 = fields.get(0);
Object f1 = fields.get(1);
Object f2 = fields.get(2);
if(f0 == null || f1 == null) {
continue; // avoid unexpected case
}
Object k = c1oi.getPrimitiveWritableObject(c1oi.copyObject(f0));
float v = c2oi.get(f1);
float cov = (f2 == null) ? WeightValueWithCovar.DEFAULT_COVAR
: c3oi.get(f2);
model.set(k, new WeightValueWithCovar(v, cov, false));
}
} finally {
reader.close();
}
}
}
return count;
}
#location 43
#vulnerability type RESOURCE_LEAK
|
#fixed code
private static long loadPredictionModel(PredictionModel model, File file, PrimitiveObjectInspector featureOI, WritableFloatObjectInspector weightOI, WritableFloatObjectInspector covarOI)
throws IOException, SerDeException {
long count = 0L;
if(!file.exists()) {
return count;
}
if(!file.getName().endsWith(".crc")) {
if(file.isDirectory()) {
for(File f : file.listFiles()) {
count += loadPredictionModel(model, f, featureOI, weightOI, covarOI);
}
} else {
LazySimpleSerDe serde = HiveUtils.getLineSerde(featureOI, weightOI, covarOI);
StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
StructField c1ref = lineOI.getStructFieldRef("c1");
StructField c2ref = lineOI.getStructFieldRef("c2");
StructField c3ref = lineOI.getStructFieldRef("c3");
PrimitiveObjectInspector c1oi = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
FloatObjectInspector c2oi = (FloatObjectInspector) c2ref.getFieldObjectInspector();
FloatObjectInspector c3oi = (FloatObjectInspector) c3ref.getFieldObjectInspector();
BufferedReader reader = null;
try {
reader = HadoopUtils.getBufferedReader(file);
String line;
while((line = reader.readLine()) != null) {
count++;
Text lineText = new Text(line);
Object lineObj = serde.deserialize(lineText);
List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
Object f0 = fields.get(0);
Object f1 = fields.get(1);
Object f2 = fields.get(2);
if(f0 == null || f1 == null) {
continue; // avoid unexpected case
}
Object k = c1oi.getPrimitiveWritableObject(c1oi.copyObject(f0));
float v = c2oi.get(f1);
float cov = (f2 == null) ? WeightValueWithCovar.DEFAULT_COVAR
: c3oi.get(f2);
model.set(k, new WeightValueWithCovar(v, cov, false));
}
} finally {
IOUtils.closeQuietly(reader);
}
}
}
return count;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private long loadPredictionModel(Map<Object, PredictionModel> label2model, File file, PrimitiveObjectInspector labelOI, PrimitiveObjectInspector featureOI, WritableFloatObjectInspector weightOI, WritableFloatObjectInspector covarOI)
throws IOException, SerDeException {
long count = 0L;
if(!file.exists()) {
return count;
}
if(!file.getName().endsWith(".crc")) {
if(file.isDirectory()) {
for(File f : file.listFiles()) {
count += loadPredictionModel(label2model, f, labelOI, featureOI, weightOI, covarOI);
}
} else {
LazySimpleSerDe serde = HiveUtils.getLineSerde(labelOI, featureOI, weightOI, covarOI);
StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
StructField c1ref = lineOI.getStructFieldRef("c1");
StructField c2ref = lineOI.getStructFieldRef("c2");
StructField c3ref = lineOI.getStructFieldRef("c3");
StructField c4ref = lineOI.getStructFieldRef("c4");
PrimitiveObjectInspector c1refOI = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
PrimitiveObjectInspector c2refOI = (PrimitiveObjectInspector) c2ref.getFieldObjectInspector();
FloatObjectInspector c3refOI = (FloatObjectInspector) c3ref.getFieldObjectInspector();
FloatObjectInspector c4refOI = (FloatObjectInspector) c4ref.getFieldObjectInspector();
final BufferedReader reader = HadoopUtils.getBufferedReader(file);
try {
String line;
while((line = reader.readLine()) != null) {
count++;
Text lineText = new Text(line);
Object lineObj = serde.deserialize(lineText);
List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
Object f0 = fields.get(0);
Object f1 = fields.get(1);
Object f2 = fields.get(2);
Object f3 = fields.get(3);
if(f0 == null || f1 == null || f2 == null) {
continue; // avoid unexpected case
}
Object label = c1refOI.getPrimitiveWritableObject(c1refOI.copyObject(f0));
PredictionModel model = label2model.get(label);
if(model == null) {
model = createModel();
label2model.put(label, model);
}
Object k = c2refOI.getPrimitiveWritableObject(c2refOI.copyObject(f1));
float v = c3refOI.get(f2);
float cov = (f3 == null) ? WeightValueWithCovar.DEFAULT_COVAR
: c4refOI.get(f3);
model.set(k, new WeightValueWithCovar(v, cov, false));
}
} finally {
reader.close();
}
}
}
return count;
}
#location 52
#vulnerability type RESOURCE_LEAK
|
#fixed code
private long loadPredictionModel(Map<Object, PredictionModel> label2model, File file, PrimitiveObjectInspector labelOI, PrimitiveObjectInspector featureOI, WritableFloatObjectInspector weightOI, WritableFloatObjectInspector covarOI)
throws IOException, SerDeException {
long count = 0L;
if(!file.exists()) {
return count;
}
if(!file.getName().endsWith(".crc")) {
if(file.isDirectory()) {
for(File f : file.listFiles()) {
count += loadPredictionModel(label2model, f, labelOI, featureOI, weightOI, covarOI);
}
} else {
LazySimpleSerDe serde = HiveUtils.getLineSerde(labelOI, featureOI, weightOI, covarOI);
StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
StructField c1ref = lineOI.getStructFieldRef("c1");
StructField c2ref = lineOI.getStructFieldRef("c2");
StructField c3ref = lineOI.getStructFieldRef("c3");
StructField c4ref = lineOI.getStructFieldRef("c4");
PrimitiveObjectInspector c1refOI = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
PrimitiveObjectInspector c2refOI = (PrimitiveObjectInspector) c2ref.getFieldObjectInspector();
FloatObjectInspector c3refOI = (FloatObjectInspector) c3ref.getFieldObjectInspector();
FloatObjectInspector c4refOI = (FloatObjectInspector) c4ref.getFieldObjectInspector();
BufferedReader reader = null;
try {
reader = HadoopUtils.getBufferedReader(file);
String line;
while((line = reader.readLine()) != null) {
count++;
Text lineText = new Text(line);
Object lineObj = serde.deserialize(lineText);
List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
Object f0 = fields.get(0);
Object f1 = fields.get(1);
Object f2 = fields.get(2);
Object f3 = fields.get(3);
if(f0 == null || f1 == null || f2 == null) {
continue; // avoid unexpected case
}
Object label = c1refOI.getPrimitiveWritableObject(c1refOI.copyObject(f0));
PredictionModel model = label2model.get(label);
if(model == null) {
model = createModel();
label2model.put(label, model);
}
Object k = c2refOI.getPrimitiveWritableObject(c2refOI.copyObject(f1));
float v = c3refOI.get(f2);
float cov = (f3 == null) ? WeightValueWithCovar.DEFAULT_COVAR
: c4refOI.get(f3);
model.set(k, new WeightValueWithCovar(v, cov, false));
}
} finally {
IOUtils.closeQuietly(reader);
}
}
}
return count;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Override
protected CommandLine processOptions(ObjectInspector[] argOIs) throws UDFArgumentException {
CommandLine cl = super.processOptions(argOIs);
this.eta0 = Primitives.parseFloat(cl.getOptionValue("eta0"), 0.1f);
this.eps = Primitives.parseFloat(cl.getOptionValue("eps"), 1.f);
this.scaling = Primitives.parseFloat(cl.getOptionValue("scale"), 100f);
return cl;
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
#fixed code
@Override
protected CommandLine processOptions(ObjectInspector[] argOIs) throws UDFArgumentException {
CommandLine cl = super.processOptions(argOIs);
if(cl == null) {
this.eta0 = 0.1f;
this.eps = 1.f;
this.scaling = 100f;
} else {
this.eta0 = Primitives.parseFloat(cl.getOptionValue("eta0"), 0.1f);
this.eps = Primitives.parseFloat(cl.getOptionValue("eps"), 1.f);
this.scaling = Primitives.parseFloat(cl.getOptionValue("scale"), 100f);
}
return cl;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
private boolean executeOperation(Operation currentOperation) throws VMRuntimeException {
if(IP < 0)
return false;
switch (currentOperation.op) {
case GOTO:
if(isInt(currentOperation.operand))
IP = Integer.parseInt(currentOperation.operand);
else
IP = jumpMap.get(currentOperation.operand);
break;
case CALL:
double candidateIP = valuesMap.get(currentOperation.operand);
if(candidateIP < 0) {
evaluateBuiltinByName(currentOperation.operand);
IP++;
}
break;
case IFEQ:
// follow the rule of smile's Math class.
double a = pop();
double b = pop();
double absa = Math.abs(a);
double absb = Math.abs(b);
if(a == b || Math.abs(a - b) <= Math.min(absa, absb) * 2.2204460492503131e-16)
if(isInt(currentOperation.operand))
IP = Integer.parseInt(currentOperation.operand);
else
IP = jumpMap.get(currentOperation.operand);
else
IP++;
break;
case IFGR:
double lower = pop();
double upper = pop();
if(upper > lower)
if(isInt(currentOperation.operand))
IP = Integer.parseInt(currentOperation.operand);
else
IP = jumpMap.get(currentOperation.operand);
else
IP++;
break;
case POP:
valuesMap.put(currentOperation.operand, pop());
IP++;
break;
case PUSH:
if(isDouble(currentOperation.operand))
push(Double.parseDouble(currentOperation.operand));
else {
Double v = valuesMap.get(currentOperation.operand);
if(v == null) {
throw new VMRuntimeException("value is not binded: "
+ currentOperation.operand);
}
push(v);
}
IP++;
break;
default:
throw new IllegalArgumentException("Machine code has wrong opcode :"
+ currentOperation.op);
}
return true;
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
#fixed code
private boolean executeOperation(Operation currentOperation) throws VMRuntimeException {
if(IP < 0) {
return false;
}
switch (currentOperation.op) {
case GOTO: {
if(isInt(currentOperation.operand)) {
IP = Integer.parseInt(currentOperation.operand);
} else {
IP = jumpMap.get(currentOperation.operand);
}
break;
}
case CALL: {
double candidateIP = valuesMap.get(currentOperation.operand);
if(candidateIP < 0) {
evaluateBuiltinByName(currentOperation.operand);
IP++;
}
break;
}
case IFEQ: {
// follow the rule of smile's Math class.
double a = pop();
double b = pop();
double absa = Math.abs(a);
double absb = Math.abs(b);
if(a == b || Math.abs(a - b) <= Math.min(absa, absb) * 2.2204460492503131e-16) {
if(isInt(currentOperation.operand)) {
IP = Integer.parseInt(currentOperation.operand);
} else {
IP = jumpMap.get(currentOperation.operand);
}
} else {
IP++;
}
break;
}
case IFGR: {
double lower = pop();
double upper = pop();
if(upper > lower) {
if(isInt(currentOperation.operand)) {
IP = Integer.parseInt(currentOperation.operand);
} else {
IP = jumpMap.get(currentOperation.operand);
}
} else {
IP++;
}
break;
}
case POP: {
valuesMap.put(currentOperation.operand, pop());
IP++;
break;
}
case PUSH: {
if(isDouble(currentOperation.operand))
push(Double.parseDouble(currentOperation.operand));
else {
Double v = valuesMap.get(currentOperation.operand);
if(v == null) {
throw new VMRuntimeException("value is not binded: "
+ currentOperation.operand);
}
push(v);
}
IP++;
break;
}
default:
throw new VMRuntimeException("Machine code has wrong opcode :"
+ currentOperation.op);
}
return true;
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Nonnull
public byte[] predictSerCodegen(boolean compress) throws HiveException {
final Attribute[] attrs = _attributes;
assert (attrs != null);
FastMultiByteArrayOutputStream bos = new FastMultiByteArrayOutputStream();
OutputStream wrapped = compress ? new DeflaterOutputStream(bos) : bos;
ObjectOutputStream oos = null;
try {
oos = new ObjectOutputStream(wrapped);
_root.writeExternal(oos);
oos.flush();
} catch (IOException ioe) {
throw new HiveException("IOException cause while serializing DecisionTree object", ioe);
} catch (Exception e) {
throw new HiveException("Exception cause while serializing DecisionTree object", e);
} finally {
IOUtils.closeQuietly(oos);
}
return bos.toByteArray_clear();
}
#location 20
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Nonnull
public byte[] predictSerCodegen(boolean compress) throws HiveException {
try {
if (compress) {
return ObjectUtils.toCompressedBytes(_root);
} else {
return ObjectUtils.toBytes(_root);
}
} catch (IOException ioe) {
throw new HiveException("IOException cause while serializing DecisionTree object", ioe);
} catch (Exception e) {
throw new HiveException("Exception cause while serializing DecisionTree object", e);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
@Nonnull
public byte[] predictSerCodegen(boolean compress) throws HiveException {
final Attribute[] attrs = _attributes;
assert (attrs != null);
FastMultiByteArrayOutputStream bos = new FastMultiByteArrayOutputStream();
OutputStream wrapped = compress ? new DeflaterOutputStream(bos) : bos;
ObjectOutputStream oos = null;
try {
oos = new ObjectOutputStream(wrapped);
_root.writeExternal(oos);
oos.flush();
} catch (IOException ioe) {
throw new HiveException("IOException cause while serializing DecisionTree object", ioe);
} catch (Exception e) {
throw new HiveException("Exception cause while serializing DecisionTree object", e);
} finally {
IOUtils.closeQuietly(oos);
}
return bos.toByteArray_clear();
}
#location 18
#vulnerability type RESOURCE_LEAK
|
#fixed code
@Nonnull
public byte[] predictSerCodegen(boolean compress) throws HiveException {
try {
if (compress) {
return ObjectUtils.toCompressedBytes(_root);
} else {
return ObjectUtils.toBytes(_root);
}
} catch (IOException ioe) {
throw new HiveException("IOException cause while serializing DecisionTree object", ioe);
} catch (Exception e) {
throw new HiveException("Exception cause while serializing DecisionTree object", e);
}
}
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#vulnerable code
public static <T> T readObject(@Nonnull final byte[] obj) throws IOException,
ClassNotFoundException {
return readObject(new FastByteArrayInputStream(obj));
}
#location 3
#vulnerability type RESOURCE_LEAK
|
#fixed code
public static <T> T readObject(@Nonnull final byte[] obj) throws IOException,
ClassNotFoundException {
return readObject(obj, obj.length);
}